Oct 03 08:40:24 crc systemd[1]: Starting Kubernetes Kubelet... Oct 03 08:40:24 crc restorecon[4672]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 03 08:40:24 crc restorecon[4672]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:24 crc restorecon[4672]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 03 08:40:24 crc restorecon[4672]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:24 crc restorecon[4672]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 03 08:40:24 crc restorecon[4672]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 03 08:40:24 crc restorecon[4672]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 03 08:40:24 crc restorecon[4672]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 03 08:40:24 crc restorecon[4672]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 08:40:25 crc 
restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 08:40:25 crc 
restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc 
restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc 
restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 03 08:40:25 
crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 
08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 
08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 08:40:25 crc 
restorecon[4672]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 
08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 
08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc 
restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 08:40:25 crc restorecon[4672]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 08:40:25 crc restorecon[4672]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 03 08:40:25 crc restorecon[4672]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Oct 03 08:40:26 crc kubenswrapper[4899]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 03 08:40:26 crc kubenswrapper[4899]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Oct 03 08:40:26 crc kubenswrapper[4899]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 03 08:40:26 crc kubenswrapper[4899]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Oct 03 08:40:26 crc kubenswrapper[4899]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Oct 03 08:40:26 crc kubenswrapper[4899]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.321324 4899 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.326852 4899 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.326871 4899 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.326876 4899 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.326879 4899 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.326883 4899 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.326901 4899 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.326905 4899 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.326923 4899 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.326927 4899 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.326932 4899 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.326935 4899 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.326939 4899 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.326943 4899 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.326946 4899 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.326950 4899 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.326955 4899 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.326960 4899 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.326963 4899 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.326967 4899 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.326971 4899 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.326974 4899 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.326978 4899 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.326981 4899 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.326985 4899 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.326988 4899 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.326992 4899 feature_gate.go:330] unrecognized feature gate: Example Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.326995 4899 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.326999 4899 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327003 4899 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327008 4899 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327011 4899 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327015 4899 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327019 4899 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327023 4899 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327027 4899 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327030 4899 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327034 4899 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327038 4899 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327041 4899 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327045 4899 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327048 4899 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327053 4899 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327056 4899 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327065 4899 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327070 4899 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327074 4899 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327079 4899 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327082 4899 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327086 4899 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327089 4899 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327093 4899 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327096 4899 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327099 4899 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327103 4899 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327106 4899 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327110 4899 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327113 4899 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327116 4899 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327120 4899 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327123 4899 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327126 4899 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327130 4899 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327134 4899 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327137 4899 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327140 4899 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327144 4899 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327147 4899 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327152 4899 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327156 4899 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327161 4899 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.327165 4899 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.327884 4899 flags.go:64] FLAG: --address="0.0.0.0" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.327912 4899 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.327931 4899 flags.go:64] FLAG: --anonymous-auth="true" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.327938 4899 flags.go:64] FLAG: --application-metrics-count-limit="100" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.327944 4899 flags.go:64] FLAG: --authentication-token-webhook="false" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.327948 4899 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.327954 4899 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.327960 4899 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.327971 4899 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.327976 4899 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.327980 4899 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.327985 4899 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.327989 4899 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.327993 4899 flags.go:64] FLAG: --cgroup-root="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.327997 4899 flags.go:64] FLAG: --cgroups-per-qos="true" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328001 4899 flags.go:64] FLAG: --client-ca-file="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328004 4899 flags.go:64] FLAG: --cloud-config="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328008 4899 flags.go:64] FLAG: --cloud-provider="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328012 4899 flags.go:64] FLAG: --cluster-dns="[]" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328020 4899 flags.go:64] FLAG: --cluster-domain="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328023 4899 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328027 4899 flags.go:64] FLAG: --config-dir="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328031 4899 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328036 4899 flags.go:64] FLAG: --container-log-max-files="5" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328048 4899 flags.go:64] FLAG: --container-log-max-size="10Mi" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328052 4899 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328056 4899 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Oct 03 
08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328060 4899 flags.go:64] FLAG: --containerd-namespace="k8s.io" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328064 4899 flags.go:64] FLAG: --contention-profiling="false" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328068 4899 flags.go:64] FLAG: --cpu-cfs-quota="true" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328072 4899 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328076 4899 flags.go:64] FLAG: --cpu-manager-policy="none" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328082 4899 flags.go:64] FLAG: --cpu-manager-policy-options="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328087 4899 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328092 4899 flags.go:64] FLAG: --enable-controller-attach-detach="true" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328096 4899 flags.go:64] FLAG: --enable-debugging-handlers="true" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328100 4899 flags.go:64] FLAG: --enable-load-reader="false" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328104 4899 flags.go:64] FLAG: --enable-server="true" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328108 4899 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328117 4899 flags.go:64] FLAG: --event-burst="100" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328121 4899 flags.go:64] FLAG: --event-qps="50" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328125 4899 flags.go:64] FLAG: --event-storage-age-limit="default=0" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328129 4899 flags.go:64] FLAG: --event-storage-event-limit="default=0" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328133 4899 flags.go:64] FLAG: --eviction-hard="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328144 4899 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328148 4899 flags.go:64] FLAG: --eviction-minimum-reclaim="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328153 4899 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328157 4899 flags.go:64] FLAG: --eviction-soft="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328161 4899 flags.go:64] FLAG: --eviction-soft-grace-period="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328164 4899 flags.go:64] FLAG: --exit-on-lock-contention="false" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328168 4899 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328172 4899 flags.go:64] FLAG: --experimental-mounter-path="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328176 4899 flags.go:64] FLAG: --fail-cgroupv1="false" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328181 4899 flags.go:64] FLAG: --fail-swap-on="true" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328185 4899 flags.go:64] FLAG: --feature-gates="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328190 4899 flags.go:64] FLAG: --file-check-frequency="20s" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328194 4899 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Oct 03 08:40:26 crc 
kubenswrapper[4899]: I1003 08:40:26.328199 4899 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328203 4899 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328207 4899 flags.go:64] FLAG: --healthz-port="10248" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328211 4899 flags.go:64] FLAG: --help="false" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328215 4899 flags.go:64] FLAG: --hostname-override="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328219 4899 flags.go:64] FLAG: --housekeeping-interval="10s" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328223 4899 flags.go:64] FLAG: --http-check-frequency="20s" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328227 4899 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328234 4899 flags.go:64] FLAG: --image-credential-provider-config="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328239 4899 flags.go:64] FLAG: --image-gc-high-threshold="85" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328242 4899 flags.go:64] FLAG: --image-gc-low-threshold="80" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328246 4899 flags.go:64] FLAG: --image-service-endpoint="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328250 4899 flags.go:64] FLAG: --kernel-memcg-notification="false" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328254 4899 flags.go:64] FLAG: --kube-api-burst="100" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328258 4899 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328263 4899 flags.go:64] FLAG: --kube-api-qps="50" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328267 4899 flags.go:64] FLAG: --kube-reserved="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328271 4899 flags.go:64] FLAG: --kube-reserved-cgroup="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328275 4899 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328279 4899 flags.go:64] FLAG: --kubelet-cgroups="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328283 4899 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328287 4899 flags.go:64] FLAG: --lock-file="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328291 4899 flags.go:64] FLAG: --log-cadvisor-usage="false" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328300 4899 flags.go:64] FLAG: --log-flush-frequency="5s" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328305 4899 flags.go:64] FLAG: --log-json-info-buffer-size="0" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328311 4899 flags.go:64] FLAG: --log-json-split-stream="false" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328315 4899 flags.go:64] FLAG: --log-text-info-buffer-size="0" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328318 4899 flags.go:64] FLAG: --log-text-split-stream="false" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328322 4899 flags.go:64] FLAG: --logging-format="text" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328326 4899 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Oct 03 08:40:26 crc 
kubenswrapper[4899]: I1003 08:40:26.328331 4899 flags.go:64] FLAG: --make-iptables-util-chains="true" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328334 4899 flags.go:64] FLAG: --manifest-url="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328338 4899 flags.go:64] FLAG: --manifest-url-header="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328344 4899 flags.go:64] FLAG: --max-housekeeping-interval="15s" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328348 4899 flags.go:64] FLAG: --max-open-files="1000000" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328353 4899 flags.go:64] FLAG: --max-pods="110" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328357 4899 flags.go:64] FLAG: --maximum-dead-containers="-1" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328361 4899 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328364 4899 flags.go:64] FLAG: --memory-manager-policy="None" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328369 4899 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328376 4899 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328380 4899 flags.go:64] FLAG: --node-ip="192.168.126.11" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328384 4899 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328393 4899 flags.go:64] FLAG: --node-status-max-images="50" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328397 4899 flags.go:64] FLAG: --node-status-update-frequency="10s" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328401 4899 flags.go:64] FLAG: --oom-score-adj="-999" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328405 4899 flags.go:64] FLAG: --pod-cidr="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328409 4899 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328416 4899 flags.go:64] FLAG: --pod-manifest-path="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328420 4899 flags.go:64] FLAG: --pod-max-pids="-1" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328425 4899 flags.go:64] FLAG: --pods-per-core="0" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328429 4899 flags.go:64] FLAG: --port="10250" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328433 4899 flags.go:64] FLAG: --protect-kernel-defaults="false" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328437 4899 flags.go:64] FLAG: --provider-id="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328442 4899 flags.go:64] FLAG: --qos-reserved="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328446 4899 flags.go:64] FLAG: --read-only-port="10255" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328450 4899 flags.go:64] FLAG: --register-node="true" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328454 4899 flags.go:64] FLAG: --register-schedulable="true" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328458 4899 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Oct 03 08:40:26 crc 
kubenswrapper[4899]: I1003 08:40:26.328471 4899 flags.go:64] FLAG: --registry-burst="10" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328475 4899 flags.go:64] FLAG: --registry-qps="5" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328479 4899 flags.go:64] FLAG: --reserved-cpus="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328483 4899 flags.go:64] FLAG: --reserved-memory="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328488 4899 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328492 4899 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328496 4899 flags.go:64] FLAG: --rotate-certificates="false" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328500 4899 flags.go:64] FLAG: --rotate-server-certificates="false" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328504 4899 flags.go:64] FLAG: --runonce="false" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328508 4899 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328512 4899 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328516 4899 flags.go:64] FLAG: --seccomp-default="false" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328520 4899 flags.go:64] FLAG: --serialize-image-pulls="true" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328526 4899 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328530 4899 flags.go:64] FLAG: --storage-driver-db="cadvisor" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328534 4899 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328538 4899 flags.go:64] FLAG: --storage-driver-password="root" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328542 4899 flags.go:64] FLAG: --storage-driver-secure="false" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328547 4899 flags.go:64] FLAG: --storage-driver-table="stats" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328550 4899 flags.go:64] FLAG: --storage-driver-user="root" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328554 4899 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328558 4899 flags.go:64] FLAG: --sync-frequency="1m0s" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328563 4899 flags.go:64] FLAG: --system-cgroups="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328566 4899 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328572 4899 flags.go:64] FLAG: --system-reserved-cgroup="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328576 4899 flags.go:64] FLAG: --tls-cert-file="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328580 4899 flags.go:64] FLAG: --tls-cipher-suites="[]" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328587 4899 flags.go:64] FLAG: --tls-min-version="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328591 4899 flags.go:64] FLAG: --tls-private-key-file="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328595 4899 flags.go:64] FLAG: --topology-manager-policy="none" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 
08:40:26.328599 4899 flags.go:64] FLAG: --topology-manager-policy-options="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328603 4899 flags.go:64] FLAG: --topology-manager-scope="container" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328607 4899 flags.go:64] FLAG: --v="2" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328613 4899 flags.go:64] FLAG: --version="false" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328618 4899 flags.go:64] FLAG: --vmodule="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328623 4899 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.328632 4899 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328769 4899 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328775 4899 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328780 4899 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328785 4899 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328790 4899 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328794 4899 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328798 4899 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328802 4899 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328808 4899 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328811 4899 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328815 4899 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328819 4899 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328822 4899 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328827 4899 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
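Note: the deprecated-flag warnings and the FLAG dump above list values the kubelet now expects to come from the file named by --config (/etc/kubernetes/kubelet.conf on this node). The following is a minimal, hypothetical sketch of how those same values could be expressed as a config-file document. It deliberately uses a hand-rolled Go struct rather than the real kubelet.config.k8s.io/v1beta1 types; the field names are assumed to match the v1beta1 KubeletConfiguration schema, and sigs.k8s.io/yaml is assumed to be available.

    // kubeletconfig_sketch.go: renders a config-file equivalent of the
    // deprecated CLI flags seen in the log (values copied from the FLAG dump).
    package main

    import (
        "fmt"

        "sigs.k8s.io/yaml" // assumed dependency; any YAML encoder honouring json tags works
    )

    // taint and kubeletConfigSketch are illustrative stand-ins, not the real API types.
    type taint struct {
        Key    string `json:"key"`
        Effect string `json:"effect"`
    }

    type kubeletConfigSketch struct {
        APIVersion               string            `json:"apiVersion"`
        Kind                     string            `json:"kind"`
        ContainerRuntimeEndpoint string            `json:"containerRuntimeEndpoint"`
        VolumePluginDir          string            `json:"volumePluginDir"`
        RegisterWithTaints       []taint           `json:"registerWithTaints"`
        SystemReserved           map[string]string `json:"systemReserved"`
    }

    func main() {
        cfg := kubeletConfigSketch{
            APIVersion:               "kubelet.config.k8s.io/v1beta1",
            Kind:                     "KubeletConfiguration",
            ContainerRuntimeEndpoint: "/var/run/crio/crio.sock",
            VolumePluginDir:          "/etc/kubernetes/kubelet-plugins/volume/exec",
            RegisterWithTaints:       []taint{{Key: "node-role.kubernetes.io/master", Effect: "NoSchedule"}},
            SystemReserved: map[string]string{
                "cpu":               "200m",
                "memory":            "350Mi",
                "ephemeral-storage": "350Mi",
            },
        }
        out, err := yaml.Marshal(cfg)
        if err != nil {
            panic(err)
        }
        fmt.Print(string(out))
    }

Running the sketch prints a YAML document whose keys mirror the deprecated flags (--container-runtime-endpoint, --volume-plugin-dir, --register-with-taints, --system-reserved); whether those exact keys are what this cluster's MachineConfig actually renders is not shown in the log.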
Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328831 4899 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328836 4899 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328840 4899 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328844 4899 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328848 4899 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328852 4899 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328855 4899 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328859 4899 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328863 4899 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328867 4899 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328870 4899 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328873 4899 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328877 4899 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328880 4899 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328884 4899 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328905 4899 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328909 4899 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328912 4899 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328916 4899 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328920 4899 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328925 4899 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328939 4899 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328944 4899 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328948 4899 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328951 4899 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328955 4899 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328962 4899 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328966 4899 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328970 4899 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328974 4899 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328977 4899 feature_gate.go:330] unrecognized feature gate: Example Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328980 4899 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328984 4899 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328988 4899 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328991 4899 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328995 4899 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.328998 4899 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.329001 4899 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.329005 4899 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.329008 4899 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.329012 4899 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.329015 4899 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.329018 4899 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.329022 4899 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.329025 4899 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.329029 4899 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 03 08:40:26 crc 
kubenswrapper[4899]: W1003 08:40:26.329032 4899 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.329035 4899 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.329039 4899 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.329042 4899 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.329046 4899 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.329049 4899 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.329053 4899 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.329056 4899 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.329060 4899 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.329063 4899 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.329067 4899 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.329084 4899 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.338151 4899 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.338189 4899 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338269 4899 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338279 4899 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338283 4899 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338289 4899 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338294 4899 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338298 4899 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338302 4899 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338307 4899 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338312 4899 feature_gate.go:330] unrecognized 
feature gate: VSphereMultiNetworks Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338318 4899 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338324 4899 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338329 4899 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338334 4899 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338339 4899 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338343 4899 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338349 4899 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338354 4899 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338359 4899 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338367 4899 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338375 4899 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338380 4899 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338385 4899 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338389 4899 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338394 4899 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338400 4899 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338404 4899 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338409 4899 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338413 4899 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338417 4899 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338423 4899 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338427 4899 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338432 4899 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338437 4899 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338442 4899 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338446 4899 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338452 4899 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338457 4899 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338462 4899 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338466 4899 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338471 4899 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338475 4899 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338480 4899 feature_gate.go:330] unrecognized feature gate: Example Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338484 4899 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338489 4899 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338493 4899 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338498 4899 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338502 4899 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338507 4899 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338511 4899 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338516 4899 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338522 4899 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338527 4899 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338566 4899 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338571 4899 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338577 4899 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338582 4899 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 03 
08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338587 4899 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338591 4899 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338596 4899 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338600 4899 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338604 4899 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338609 4899 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338614 4899 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338618 4899 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338623 4899 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338628 4899 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338634 4899 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338639 4899 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338644 4899 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338649 4899 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338654 4899 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.338662 4899 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338814 4899 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338823 4899 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338828 4899 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338832 4899 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338836 4899 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338841 4899 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 03 08:40:26 crc 
kubenswrapper[4899]: W1003 08:40:26.338847 4899 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338851 4899 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338854 4899 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338858 4899 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338862 4899 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338866 4899 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338870 4899 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338874 4899 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338877 4899 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338881 4899 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338884 4899 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338903 4899 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338907 4899 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338912 4899 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338915 4899 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338919 4899 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338922 4899 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338926 4899 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338930 4899 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338936 4899 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338940 4899 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338944 4899 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338948 4899 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338952 4899 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338956 4899 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338960 4899 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338964 4899 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338968 4899 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338971 4899 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338975 4899 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338979 4899 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338990 4899 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338995 4899 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.338998 4899 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339002 4899 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339006 4899 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339009 4899 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339013 4899 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339017 4899 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339020 4899 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339025 4899 feature_gate.go:330] unrecognized feature gate: Example Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339029 4899 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339034 4899 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339038 4899 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339042 4899 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339048 4899 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339054 4899 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339058 4899 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339062 4899 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339067 4899 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339071 4899 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339076 4899 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339080 4899 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339084 4899 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339088 4899 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339092 4899 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339095 4899 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339100 4899 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339103 4899 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339107 4899 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339112 4899 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
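Note: each of the repeated feature-gate passes above ends with the same resolved map (the feature_gate.go:386 "feature gates:" line). Gates the embedded Kubernetes code does not recognise appear as "unrecognized feature gate" warnings, gates that are already GA or deprecated but still set explicitly get removal notices, and the remainder land in the map with their final values. The sketch below is a rough illustration of that mechanism using the k8s.io/component-base/featuregate package; the gate names and wiring are illustrative, not the kubelet's actual registration code.

    // featuregate_sketch.go: registers two gates and applies overrides the way
    // a --feature-gates string or config-file entry would be applied.
    package main

    import (
        "fmt"

        "k8s.io/component-base/featuregate"
    )

    func main() {
        gate := featuregate.NewFeatureGate()

        // Register known gates with their defaults (values chosen to match the
        // resolved map in the log; purely illustrative).
        if err := gate.Add(map[featuregate.Feature]featuregate.FeatureSpec{
            "NodeSwap":              {Default: false, PreRelease: featuregate.Beta},
            "CloudDualStackNodeIPs": {Default: true, PreRelease: featuregate.GA},
        }); err != nil {
            panic(err)
        }

        // Apply operator-supplied overrides. With this package an unknown key
        // makes Set return an error; the wrapper in this log appears to surface
        // such keys as "unrecognized feature gate" warnings instead of failing.
        if err := gate.Set("CloudDualStackNodeIPs=true,NodeSwap=false"); err != nil {
            fmt.Println("override rejected:", err)
        }

        fmt.Println("NodeSwap enabled:", gate.Enabled("NodeSwap"))
        fmt.Println("CloudDualStackNodeIPs enabled:", gate.Enabled("CloudDualStackNodeIPs"))
    }

The final map printed in the log (CloudDualStackNodeIPs:true, NodeSwap:false, and so on) is the same shape this sketch ends up with after Add plus Set, which is why the three passes all report identical contents.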
Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339116 4899 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339120 4899 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339124 4899 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.339128 4899 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.339135 4899 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.339309 4899 server.go:940] "Client rotation is on, will bootstrap in background" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.343192 4899 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.343269 4899 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.347969 4899 server.go:997] "Starting client certificate rotation" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.348007 4899 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.348186 4899 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-15 02:30:49.932082433 +0000 UTC Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.348269 4899 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1025h50m23.583815451s for next certificate rotation Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.372198 4899 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.375360 4899 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.393475 4899 log.go:25] "Validated CRI v1 runtime API" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.421052 4899 log.go:25] "Validated CRI v1 image API" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.428073 4899 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.433819 4899 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-10-03-08-36-19-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.433856 4899 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 
fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.450494 4899 manager.go:217] Machine: {Timestamp:2025-10-03 08:40:26.446332375 +0000 UTC m=+0.553817328 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799886 MemoryCapacity:33654120448 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:bb3940af-b89c-4a7a-b7ee-d19044192ef2 BootID:557489d0-c981-4ed2-aac9-e59d234411ae Filesystems:[{Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827060224 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:75:73:d1 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:75:73:d1 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:af:1b:36 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:10:ba:23 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:c5:c3:04 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:fe:15:9b Speed:-1 Mtu:1496} {Name:eth10 MacAddress:36:f4:bf:26:9e:f2 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:ce:40:b9:29:d9:15 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654120448 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 
Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.450775 4899 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. 
Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.450942 4899 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.453990 4899 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.454168 4899 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.454205 4899 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.454382 4899 topology_manager.go:138] "Creating topology manager with none policy" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.454391 4899 container_manager_linux.go:303] "Creating device plugin manager" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.455148 4899 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.455176 4899 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.456132 4899 state_mem.go:36] "Initialized new in-memory state store" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.456227 4899 server.go:1245] "Using root directory" path="/var/lib/kubelet" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.460565 4899 kubelet.go:418] "Attempting to sync node with API server" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.460599 4899 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" 
Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.460637 4899 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.460652 4899 kubelet.go:324] "Adding apiserver pod source" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.460665 4899 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.469447 4899 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.217:6443: connect: connection refused Oct 03 08:40:26 crc kubenswrapper[4899]: E1003 08:40:26.469559 4899 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.217:6443: connect: connection refused" logger="UnhandledError" Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.469444 4899 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.217:6443: connect: connection refused Oct 03 08:40:26 crc kubenswrapper[4899]: E1003 08:40:26.469609 4899 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.217:6443: connect: connection refused" logger="UnhandledError" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.472924 4899 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.473996 4899 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.475394 4899 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.476588 4899 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.476690 4899 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.476768 4899 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.476818 4899 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.476870 4899 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.476956 4899 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.477038 4899 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.477107 4899 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.477170 4899 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.477219 4899 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.477269 4899 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.477321 4899 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.478119 4899 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.478578 4899 server.go:1280] "Started kubelet" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.479273 4899 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.479277 4899 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.479773 4899 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.217:6443: connect: connection refused Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.480333 4899 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Oct 03 08:40:26 crc systemd[1]: Started Kubernetes Kubelet. 
Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.481211 4899 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.481241 4899 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.481339 4899 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 14:18:57.270225776 +0000 UTC Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.481378 4899 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 2309h38m30.788849554s for next certificate rotation Oct 03 08:40:26 crc kubenswrapper[4899]: E1003 08:40:26.481453 4899 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.481505 4899 volume_manager.go:287] "The desired_state_of_world populator starts" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.481512 4899 volume_manager.go:289] "Starting Kubelet Volume Manager" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.481588 4899 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.482917 4899 factory.go:55] Registering systemd factory Oct 03 08:40:26 crc kubenswrapper[4899]: E1003 08:40:26.483398 4899 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.217:6443: connect: connection refused" interval="200ms" Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.483915 4899 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.217:6443: connect: connection refused Oct 03 08:40:26 crc kubenswrapper[4899]: E1003 08:40:26.484000 4899 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.217:6443: connect: connection refused" logger="UnhandledError" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.484663 4899 factory.go:221] Registration of the systemd container factory successfully Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.488049 4899 factory.go:153] Registering CRI-O factory Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.488071 4899 factory.go:221] Registration of the crio container factory successfully Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.488130 4899 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.488149 4899 factory.go:103] Registering Raw factory Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.488164 4899 manager.go:1196] Started watching for new ooms in manager Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.488602 4899 manager.go:319] Starting recovery of all containers Oct 03 08:40:26 crc kubenswrapper[4899]: 
I1003 08:40:26.490744 4899 server.go:460] "Adding debug handlers to kubelet server" Oct 03 08:40:26 crc kubenswrapper[4899]: E1003 08:40:26.488031 4899 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.129.56.217:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186aee7d535d666f default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-03 08:40:26.478552687 +0000 UTC m=+0.586037630,LastTimestamp:2025-10-03 08:40:26.478552687 +0000 UTC m=+0.586037630,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.493555 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.493618 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.493634 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.493656 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.493669 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.493685 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.493697 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.493708 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: 
I1003 08:40:26.493722 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.493735 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.493745 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.493757 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.493768 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.493781 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.493791 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.493801 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.493811 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.493820 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.493830 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.493840 4899 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.493850 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.493859 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.493869 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.493882 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.493935 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.493961 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.493974 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.493986 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.493996 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494006 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494016 4899 reconstruct.go:130] "Volume is marked as uncertain 
and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494030 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494041 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494051 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494063 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494075 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494083 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494092 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494103 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494113 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494123 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494134 4899 reconstruct.go:130] "Volume is marked as uncertain and 
added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494143 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494153 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494163 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494173 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494203 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494215 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494225 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494235 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494244 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494253 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494269 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" 
volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494279 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494291 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494321 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494334 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494346 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494364 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494375 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494384 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494396 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494413 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494436 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494448 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494459 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494479 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494505 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494523 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494533 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494542 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494552 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494562 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494571 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494581 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" 
volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494664 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494678 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494690 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494703 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494719 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494732 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494744 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494756 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494768 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494780 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494794 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" 
volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494809 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494822 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494837 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494850 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494875 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494911 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494929 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494943 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494959 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.494978 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498142 4899 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" 
volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498187 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498203 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498217 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498232 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498246 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498260 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498273 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498287 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498315 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498349 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498366 4899 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498383 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498396 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498410 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498426 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498438 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498450 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498461 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498471 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498481 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498491 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498501 4899 reconstruct.go:130] "Volume 
is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498512 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498521 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498532 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498542 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498552 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498564 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498575 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498586 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498598 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498648 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498660 4899 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498671 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498682 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498694 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498705 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498715 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498725 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498736 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498746 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498756 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498766 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498777 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498788 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498803 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498817 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498827 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498837 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498853 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498864 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498874 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498886 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498913 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498928 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498940 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498950 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498961 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498971 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498982 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.498992 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499004 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499017 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499029 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499042 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499052 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" 
volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499063 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499074 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499084 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499095 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499105 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499115 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499125 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499135 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499156 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499166 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499177 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" 
volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499189 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499200 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499211 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499225 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499235 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499245 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499255 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499266 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499275 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499286 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499296 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" 
volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499305 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499316 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499327 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499339 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499389 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499420 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499434 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499444 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499457 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499471 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499481 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499490 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499501 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499511 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499522 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499531 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499541 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499552 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499561 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499573 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499590 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499602 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499617 4899 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499628 4899 reconstruct.go:97] "Volume reconstruction finished" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.499635 4899 reconciler.go:26] "Reconciler: start to sync state" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.506944 4899 manager.go:324] Recovery completed Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.515579 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.517146 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.517270 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.517329 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.518324 4899 cpu_manager.go:225] "Starting CPU manager" policy="none" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.518345 4899 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.518373 4899 state_mem.go:36] "Initialized new in-memory state store" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.523652 4899 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.525287 4899 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.525386 4899 status_manager.go:217] "Starting to sync pod status with apiserver" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.525625 4899 kubelet.go:2335] "Starting kubelet main sync loop" Oct 03 08:40:26 crc kubenswrapper[4899]: E1003 08:40:26.525751 4899 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Oct 03 08:40:26 crc kubenswrapper[4899]: W1003 08:40:26.527367 4899 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.217:6443: connect: connection refused Oct 03 08:40:26 crc kubenswrapper[4899]: E1003 08:40:26.527445 4899 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.217:6443: connect: connection refused" logger="UnhandledError" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.532307 4899 policy_none.go:49] "None policy: Start" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.533343 4899 memory_manager.go:170] "Starting memorymanager" policy="None" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.533421 4899 state_mem.go:35] "Initializing new in-memory state store" Oct 03 08:40:26 crc kubenswrapper[4899]: E1003 08:40:26.582468 4899 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.596274 4899 manager.go:334] "Starting Device Plugin manager" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.596760 4899 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.596784 4899 server.go:79] "Starting device plugin registration server" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.597526 4899 eviction_manager.go:189] "Eviction manager: starting control loop" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.597554 4899 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.597811 4899 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.597970 4899 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.597978 4899 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Oct 03 08:40:26 crc kubenswrapper[4899]: E1003 08:40:26.606792 4899 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.625987 4899 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 03 08:40:26 crc kubenswrapper[4899]: 
I1003 08:40:26.626166 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.627503 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.627550 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.627561 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.627760 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.628143 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.628302 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.629654 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.629696 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.629718 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.630040 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.630819 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.630875 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.631454 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.631500 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.631517 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.631966 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.632185 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.632255 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.632277 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.633009 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.633050 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.633563 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.634396 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.634433 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.634998 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.635028 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.635038 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.635189 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.635261 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.635299 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.635318 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.635504 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.635581 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.636017 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.636061 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.636073 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.636512 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.636557 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.637029 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.637080 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.637108 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.638008 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.638733 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.638780 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:26 crc kubenswrapper[4899]: E1003 08:40:26.684146 4899 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.217:6443: connect: connection refused" interval="400ms" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.698155 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.699551 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.699618 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.699635 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.699678 4899 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 03 08:40:26 crc kubenswrapper[4899]: E1003 08:40:26.700433 4899 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.217:6443: connect: connection refused" node="crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.700989 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.701026 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.701049 4899 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.701072 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.701090 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.701133 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.701164 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.701240 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.701285 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.701322 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.701339 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.701352 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.701401 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.701414 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.701427 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.802696 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.803137 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.803162 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.803185 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.803203 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.803233 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: 
I1003 08:40:26.803251 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.803270 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.803312 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.803335 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.803354 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.803374 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.803393 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.803411 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.803443 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.803615 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod 
\"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.803661 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.803691 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.803707 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.803742 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.803772 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.802942 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.803847 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.803938 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.803991 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.804049 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod 
\"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.804047 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.804109 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.804134 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.804166 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.901093 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.902703 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.902790 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.902871 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.902940 4899 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 03 08:40:26 crc kubenswrapper[4899]: E1003 08:40:26.903739 4899 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.217:6443: connect: connection refused" node="crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.972795 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 03 08:40:26 crc kubenswrapper[4899]: I1003 08:40:26.993575 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 03 08:40:27 crc kubenswrapper[4899]: I1003 08:40:27.000531 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 08:40:27 crc kubenswrapper[4899]: W1003 08:40:27.018647 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-3d89e9c3906e2dc070a9db33edafda5032707fbb54458d6f4267696ab3484a85 WatchSource:0}: Error finding container 3d89e9c3906e2dc070a9db33edafda5032707fbb54458d6f4267696ab3484a85: Status 404 returned error can't find the container with id 3d89e9c3906e2dc070a9db33edafda5032707fbb54458d6f4267696ab3484a85 Oct 03 08:40:27 crc kubenswrapper[4899]: I1003 08:40:27.018787 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 08:40:27 crc kubenswrapper[4899]: I1003 08:40:27.024732 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 08:40:27 crc kubenswrapper[4899]: W1003 08:40:27.039485 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-c342b96572f8f913a9f725fc6222dffb159ab94d374c71680a4deaaa2623d95a WatchSource:0}: Error finding container c342b96572f8f913a9f725fc6222dffb159ab94d374c71680a4deaaa2623d95a: Status 404 returned error can't find the container with id c342b96572f8f913a9f725fc6222dffb159ab94d374c71680a4deaaa2623d95a Oct 03 08:40:27 crc kubenswrapper[4899]: W1003 08:40:27.046744 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-0d38440a775e1eec186f51c4101f909ba30724657396e7d5dea9dcb2e934351f WatchSource:0}: Error finding container 0d38440a775e1eec186f51c4101f909ba30724657396e7d5dea9dcb2e934351f: Status 404 returned error can't find the container with id 0d38440a775e1eec186f51c4101f909ba30724657396e7d5dea9dcb2e934351f Oct 03 08:40:27 crc kubenswrapper[4899]: W1003 08:40:27.047652 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-b143f13793de73cad2ae013d05fdfdb250b4a5064443f5749c1ed2543e83d9ca WatchSource:0}: Error finding container b143f13793de73cad2ae013d05fdfdb250b4a5064443f5749c1ed2543e83d9ca: Status 404 returned error can't find the container with id b143f13793de73cad2ae013d05fdfdb250b4a5064443f5749c1ed2543e83d9ca Oct 03 08:40:27 crc kubenswrapper[4899]: E1003 08:40:27.085415 4899 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.217:6443: connect: connection refused" interval="800ms" Oct 03 08:40:27 crc kubenswrapper[4899]: I1003 08:40:27.304371 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:27 crc kubenswrapper[4899]: I1003 08:40:27.306384 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:27 crc kubenswrapper[4899]: I1003 08:40:27.306443 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:27 crc kubenswrapper[4899]: I1003 08:40:27.306458 4899 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:27 crc kubenswrapper[4899]: I1003 08:40:27.306488 4899 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 03 08:40:27 crc kubenswrapper[4899]: E1003 08:40:27.306999 4899 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.217:6443: connect: connection refused" node="crc" Oct 03 08:40:27 crc kubenswrapper[4899]: W1003 08:40:27.378395 4899 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.217:6443: connect: connection refused Oct 03 08:40:27 crc kubenswrapper[4899]: E1003 08:40:27.378478 4899 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.217:6443: connect: connection refused" logger="UnhandledError" Oct 03 08:40:27 crc kubenswrapper[4899]: W1003 08:40:27.420760 4899 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.217:6443: connect: connection refused Oct 03 08:40:27 crc kubenswrapper[4899]: E1003 08:40:27.420862 4899 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.217:6443: connect: connection refused" logger="UnhandledError" Oct 03 08:40:27 crc kubenswrapper[4899]: I1003 08:40:27.481169 4899 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.217:6443: connect: connection refused Oct 03 08:40:27 crc kubenswrapper[4899]: I1003 08:40:27.530774 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"b143f13793de73cad2ae013d05fdfdb250b4a5064443f5749c1ed2543e83d9ca"} Oct 03 08:40:27 crc kubenswrapper[4899]: I1003 08:40:27.532288 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0d38440a775e1eec186f51c4101f909ba30724657396e7d5dea9dcb2e934351f"} Oct 03 08:40:27 crc kubenswrapper[4899]: I1003 08:40:27.533404 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"c342b96572f8f913a9f725fc6222dffb159ab94d374c71680a4deaaa2623d95a"} Oct 03 08:40:27 crc kubenswrapper[4899]: I1003 08:40:27.534438 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"3d89e9c3906e2dc070a9db33edafda5032707fbb54458d6f4267696ab3484a85"} Oct 03 08:40:27 crc 
kubenswrapper[4899]: I1003 08:40:27.535342 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"013169b628b4f9630ed8164b870507096b40dc77d26f9627f36142097d6e4ec8"} Oct 03 08:40:27 crc kubenswrapper[4899]: W1003 08:40:27.811197 4899 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.217:6443: connect: connection refused Oct 03 08:40:27 crc kubenswrapper[4899]: E1003 08:40:27.811408 4899 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.217:6443: connect: connection refused" logger="UnhandledError" Oct 03 08:40:27 crc kubenswrapper[4899]: E1003 08:40:27.886505 4899 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.217:6443: connect: connection refused" interval="1.6s" Oct 03 08:40:27 crc kubenswrapper[4899]: W1003 08:40:27.974542 4899 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.217:6443: connect: connection refused Oct 03 08:40:27 crc kubenswrapper[4899]: E1003 08:40:27.974602 4899 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.217:6443: connect: connection refused" logger="UnhandledError" Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.107479 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.109322 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.109362 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.109372 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.109394 4899 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 03 08:40:28 crc kubenswrapper[4899]: E1003 08:40:28.109841 4899 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.217:6443: connect: connection refused" node="crc" Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.481044 4899 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.217:6443: connect: connection refused Oct 03 08:40:28 crc kubenswrapper[4899]: 
I1003 08:40:28.539685 4899 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="9b658507fbaf0ea4f98774e10f9bf7c34ad6dd9fffef18057d09c287f217091d" exitCode=0 Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.539748 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"9b658507fbaf0ea4f98774e10f9bf7c34ad6dd9fffef18057d09c287f217091d"} Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.539779 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.540657 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.540683 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.540692 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.541746 4899 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276" exitCode=0 Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.541794 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276"} Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.541902 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.542738 4899 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="ba16d72a68e73e000a876907e9dea4e5ad713c3bf0b4b6478d4fde2efedda458" exitCode=0 Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.542795 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.542778 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"ba16d72a68e73e000a876907e9dea4e5ad713c3bf0b4b6478d4fde2efedda458"} Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.543210 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.543248 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.543278 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.547074 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.547103 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 
03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.547115 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.547463 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.548387 4899 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="9f8aa155ea687fce4c2d31b71921835abd2cce4e22801400914a2fb98f573ba1" exitCode=0 Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.548424 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.548450 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.548453 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"9f8aa155ea687fce4c2d31b71921835abd2cce4e22801400914a2fb98f573ba1"} Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.548461 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.548507 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.549243 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.549272 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.549283 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.553856 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b"} Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.553921 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4"} Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.553937 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460"} Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.553945 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.553949 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073"} Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.554764 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.554787 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:28 crc kubenswrapper[4899]: I1003 08:40:28.554796 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:29 crc kubenswrapper[4899]: W1003 08:40:29.069027 4899 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.217:6443: connect: connection refused Oct 03 08:40:29 crc kubenswrapper[4899]: E1003 08:40:29.069110 4899 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.217:6443: connect: connection refused" logger="UnhandledError" Oct 03 08:40:29 crc kubenswrapper[4899]: W1003 08:40:29.076509 4899 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.217:6443: connect: connection refused Oct 03 08:40:29 crc kubenswrapper[4899]: E1003 08:40:29.076575 4899 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.217:6443: connect: connection refused" logger="UnhandledError" Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.482194 4899 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.217:6443: connect: connection refused Oct 03 08:40:29 crc kubenswrapper[4899]: E1003 08:40:29.487321 4899 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.217:6443: connect: connection refused" interval="3.2s" Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.559285 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"54093b84153ff7894489025339a6977db0f46b4d2a3b49cc60077af28010d315"} Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.559332 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"306605d9d3b9065f7c546feb2478296ab23df5ee17f88e9732b78584614b2c3f"} Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.559343 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"2890c36988f29fbea57dfd712a1d66304622780d233996abe521a7a8bf0a932d"} Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.559346 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.560260 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.560302 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.560314 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.562319 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a"} Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.562359 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb"} Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.562372 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273"} Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.562381 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c"} Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.562391 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b"} Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.562394 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.563131 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.563151 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.563163 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.563744 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"6f3d24871e87681e41b4247d18fbe1f63d9f6c5d9b2d4e70062453716a55026f"} Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.563786 4899 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.564396 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.564422 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.564431 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.565279 4899 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="7c0af6b6a558fd2aa16cedbc2a364531f8a578517bd5e100686759206300555b" exitCode=0 Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.565338 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"7c0af6b6a558fd2aa16cedbc2a364531f8a578517bd5e100686759206300555b"} Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.565366 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.565389 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.566029 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.566058 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.566070 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.566325 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.566351 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.566360 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.709952 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.710923 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.710962 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.710976 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:29 crc kubenswrapper[4899]: I1003 08:40:29.711003 4899 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 03 08:40:29 crc kubenswrapper[4899]: E1003 08:40:29.711433 4899 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 
38.129.56.217:6443: connect: connection refused" node="crc" Oct 03 08:40:30 crc kubenswrapper[4899]: W1003 08:40:30.040406 4899 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.217:6443: connect: connection refused Oct 03 08:40:30 crc kubenswrapper[4899]: E1003 08:40:30.040498 4899 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.217:6443: connect: connection refused" logger="UnhandledError" Oct 03 08:40:30 crc kubenswrapper[4899]: W1003 08:40:30.055522 4899 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.217:6443: connect: connection refused Oct 03 08:40:30 crc kubenswrapper[4899]: E1003 08:40:30.055574 4899 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.217:6443: connect: connection refused" logger="UnhandledError" Oct 03 08:40:30 crc kubenswrapper[4899]: I1003 08:40:30.569719 4899 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="34f2fc29bb339c15e73731f1ffa6d4dd1a64a742ec75024a04c96e8957691fd4" exitCode=0 Oct 03 08:40:30 crc kubenswrapper[4899]: I1003 08:40:30.569772 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"34f2fc29bb339c15e73731f1ffa6d4dd1a64a742ec75024a04c96e8957691fd4"} Oct 03 08:40:30 crc kubenswrapper[4899]: I1003 08:40:30.569827 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:30 crc kubenswrapper[4899]: I1003 08:40:30.569849 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 08:40:30 crc kubenswrapper[4899]: I1003 08:40:30.569853 4899 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 08:40:30 crc kubenswrapper[4899]: I1003 08:40:30.569879 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:30 crc kubenswrapper[4899]: I1003 08:40:30.569883 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:30 crc kubenswrapper[4899]: I1003 08:40:30.569831 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:30 crc kubenswrapper[4899]: I1003 08:40:30.570951 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:30 crc kubenswrapper[4899]: I1003 08:40:30.570973 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:30 crc kubenswrapper[4899]: I1003 08:40:30.570952 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 03 08:40:30 crc kubenswrapper[4899]: I1003 08:40:30.571001 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:30 crc kubenswrapper[4899]: I1003 08:40:30.571013 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:30 crc kubenswrapper[4899]: I1003 08:40:30.570981 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:30 crc kubenswrapper[4899]: I1003 08:40:30.571108 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:30 crc kubenswrapper[4899]: I1003 08:40:30.571138 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:30 crc kubenswrapper[4899]: I1003 08:40:30.571150 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:30 crc kubenswrapper[4899]: I1003 08:40:30.571483 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:30 crc kubenswrapper[4899]: I1003 08:40:30.571511 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:30 crc kubenswrapper[4899]: I1003 08:40:30.571522 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:30 crc kubenswrapper[4899]: I1003 08:40:30.715830 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 08:40:31 crc kubenswrapper[4899]: I1003 08:40:31.576675 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"6dc91195ca643523a395a1df1205f27ea6c689b4417eea7b63979fc4c611ebed"} Oct 03 08:40:31 crc kubenswrapper[4899]: I1003 08:40:31.576719 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2411b656b734b1df8d0758579ba689f874307d86e3f8b5a61e95d8c4945c3dd4"} Oct 03 08:40:31 crc kubenswrapper[4899]: I1003 08:40:31.576739 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:31 crc kubenswrapper[4899]: I1003 08:40:31.576769 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"fbca5c4e351b7ac0d1626e29a903cfce19e178b5066c568b7f5ae1adbe1649a1"} Oct 03 08:40:31 crc kubenswrapper[4899]: I1003 08:40:31.576781 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:31 crc kubenswrapper[4899]: I1003 08:40:31.576789 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"e2cb122797729e37384443595011ec18ce7e8666b461e43f9c4e03504474b795"} Oct 03 08:40:31 crc kubenswrapper[4899]: I1003 08:40:31.576866 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"94c5818c44f66f9b78d76df2e5512225f75faed42cb78d2e8c45f1e0d5dfe768"} Oct 03 08:40:31 crc kubenswrapper[4899]: I1003 08:40:31.577776 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:31 crc kubenswrapper[4899]: I1003 08:40:31.577807 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:31 crc kubenswrapper[4899]: I1003 08:40:31.577821 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:31 crc kubenswrapper[4899]: I1003 08:40:31.577836 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:31 crc kubenswrapper[4899]: I1003 08:40:31.577855 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:31 crc kubenswrapper[4899]: I1003 08:40:31.577866 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:31 crc kubenswrapper[4899]: I1003 08:40:31.743820 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 08:40:31 crc kubenswrapper[4899]: I1003 08:40:31.744046 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:31 crc kubenswrapper[4899]: I1003 08:40:31.745241 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:31 crc kubenswrapper[4899]: I1003 08:40:31.745289 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:31 crc kubenswrapper[4899]: I1003 08:40:31.745300 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:32 crc kubenswrapper[4899]: I1003 08:40:32.578736 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:32 crc kubenswrapper[4899]: I1003 08:40:32.578796 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:32 crc kubenswrapper[4899]: I1003 08:40:32.579851 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:32 crc kubenswrapper[4899]: I1003 08:40:32.579883 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:32 crc kubenswrapper[4899]: I1003 08:40:32.579896 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:32 crc kubenswrapper[4899]: I1003 08:40:32.579984 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:32 crc kubenswrapper[4899]: I1003 08:40:32.580012 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:32 crc kubenswrapper[4899]: I1003 08:40:32.580023 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:32 crc kubenswrapper[4899]: I1003 08:40:32.911768 4899 kubelet_node_status.go:401] "Setting node annotation to 
enable volume controller attach/detach" Oct 03 08:40:32 crc kubenswrapper[4899]: I1003 08:40:32.912906 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:32 crc kubenswrapper[4899]: I1003 08:40:32.913023 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:32 crc kubenswrapper[4899]: I1003 08:40:32.913039 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:32 crc kubenswrapper[4899]: I1003 08:40:32.913059 4899 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 03 08:40:32 crc kubenswrapper[4899]: I1003 08:40:32.960729 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Oct 03 08:40:33 crc kubenswrapper[4899]: I1003 08:40:33.581753 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:33 crc kubenswrapper[4899]: I1003 08:40:33.582777 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:33 crc kubenswrapper[4899]: I1003 08:40:33.582810 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:33 crc kubenswrapper[4899]: I1003 08:40:33.582821 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:34 crc kubenswrapper[4899]: I1003 08:40:34.271752 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 08:40:34 crc kubenswrapper[4899]: I1003 08:40:34.272018 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:34 crc kubenswrapper[4899]: I1003 08:40:34.273344 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:34 crc kubenswrapper[4899]: I1003 08:40:34.273404 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:34 crc kubenswrapper[4899]: I1003 08:40:34.273415 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:34 crc kubenswrapper[4899]: I1003 08:40:34.789709 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 08:40:34 crc kubenswrapper[4899]: I1003 08:40:34.789940 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:34 crc kubenswrapper[4899]: I1003 08:40:34.791113 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:34 crc kubenswrapper[4899]: I1003 08:40:34.791140 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:34 crc kubenswrapper[4899]: I1003 08:40:34.791149 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:35 crc kubenswrapper[4899]: I1003 08:40:35.307130 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 08:40:35 crc kubenswrapper[4899]: I1003 08:40:35.587344 4899 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:35 crc kubenswrapper[4899]: I1003 08:40:35.589087 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:35 crc kubenswrapper[4899]: I1003 08:40:35.589132 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:35 crc kubenswrapper[4899]: I1003 08:40:35.589143 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:36 crc kubenswrapper[4899]: E1003 08:40:36.606923 4899 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 03 08:40:36 crc kubenswrapper[4899]: I1003 08:40:36.978813 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 08:40:36 crc kubenswrapper[4899]: I1003 08:40:36.979071 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:36 crc kubenswrapper[4899]: I1003 08:40:36.980171 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:36 crc kubenswrapper[4899]: I1003 08:40:36.980208 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:36 crc kubenswrapper[4899]: I1003 08:40:36.980220 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:38 crc kubenswrapper[4899]: I1003 08:40:38.308325 4899 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 03 08:40:38 crc kubenswrapper[4899]: I1003 08:40:38.308462 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 08:40:38 crc kubenswrapper[4899]: I1003 08:40:38.522200 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 08:40:38 crc kubenswrapper[4899]: I1003 08:40:38.522403 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:38 crc kubenswrapper[4899]: I1003 08:40:38.524531 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:38 crc kubenswrapper[4899]: I1003 08:40:38.524721 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:38 crc kubenswrapper[4899]: I1003 08:40:38.524817 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:38 crc kubenswrapper[4899]: I1003 08:40:38.527338 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 08:40:38 crc kubenswrapper[4899]: I1003 08:40:38.593342 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:38 crc kubenswrapper[4899]: I1003 08:40:38.594184 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:38 crc kubenswrapper[4899]: I1003 08:40:38.594235 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:38 crc kubenswrapper[4899]: I1003 08:40:38.594249 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:38 crc kubenswrapper[4899]: I1003 08:40:38.597582 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 08:40:39 crc kubenswrapper[4899]: I1003 08:40:39.595606 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:39 crc kubenswrapper[4899]: I1003 08:40:39.596503 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:39 crc kubenswrapper[4899]: I1003 08:40:39.596542 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:39 crc kubenswrapper[4899]: I1003 08:40:39.596552 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:40 crc kubenswrapper[4899]: I1003 08:40:40.157105 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Oct 03 08:40:40 crc kubenswrapper[4899]: I1003 08:40:40.157287 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:40 crc kubenswrapper[4899]: I1003 08:40:40.158355 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:40 crc kubenswrapper[4899]: I1003 08:40:40.158391 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:40 crc kubenswrapper[4899]: I1003 08:40:40.158403 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:40 crc kubenswrapper[4899]: I1003 08:40:40.186072 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Oct 03 08:40:40 crc kubenswrapper[4899]: I1003 08:40:40.322871 4899 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Oct 03 08:40:40 crc kubenswrapper[4899]: I1003 08:40:40.322946 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Oct 03 08:40:40 crc kubenswrapper[4899]: I1003 08:40:40.330584 4899 patch_prober.go:28] interesting pod/kube-apiserver-crc 
container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Oct 03 08:40:40 crc kubenswrapper[4899]: I1003 08:40:40.330650 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Oct 03 08:40:40 crc kubenswrapper[4899]: I1003 08:40:40.597843 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:40 crc kubenswrapper[4899]: I1003 08:40:40.599114 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:40 crc kubenswrapper[4899]: I1003 08:40:40.599169 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:40 crc kubenswrapper[4899]: I1003 08:40:40.599183 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:40 crc kubenswrapper[4899]: I1003 08:40:40.607935 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Oct 03 08:40:40 crc kubenswrapper[4899]: I1003 08:40:40.720963 4899 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]log ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]etcd ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/openshift.io-api-request-count-filter ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/openshift.io-startkubeinformers ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/openshift.io-openshift-apiserver-reachable ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/openshift.io-oauth-apiserver-reachable ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/start-apiserver-admission-initializer ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/generic-apiserver-start-informers ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/priority-and-fairness-config-consumer ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/priority-and-fairness-filter ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/storage-object-count-tracker-hook ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/start-apiextensions-informers ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/start-apiextensions-controllers ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/crd-informer-synced ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/start-system-namespaces-controller ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/start-cluster-authentication-info-controller ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/start-kube-apiserver-identity-lease-controller ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok 
Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/start-legacy-token-tracking-controller ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/start-service-ip-repair-controllers ok Oct 03 08:40:40 crc kubenswrapper[4899]: [-]poststarthook/rbac/bootstrap-roles failed: reason withheld Oct 03 08:40:40 crc kubenswrapper[4899]: [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/priority-and-fairness-config-producer ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/bootstrap-controller ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/aggregator-reload-proxy-client-cert ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/start-kube-aggregator-informers ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/apiservice-status-local-available-controller ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/apiservice-status-remote-available-controller ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/apiservice-registration-controller ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/apiservice-wait-for-first-sync ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/apiservice-discovery-controller ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/kube-apiserver-autoregistration ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]autoregister-completion ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/apiservice-openapi-controller ok Oct 03 08:40:40 crc kubenswrapper[4899]: [+]poststarthook/apiservice-openapiv3-controller ok Oct 03 08:40:40 crc kubenswrapper[4899]: livez check failed Oct 03 08:40:40 crc kubenswrapper[4899]: I1003 08:40:40.721019 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 08:40:40 crc kubenswrapper[4899]: I1003 08:40:40.721066 4899 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Oct 03 08:40:40 crc kubenswrapper[4899]: I1003 08:40:40.721120 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Oct 03 08:40:41 crc kubenswrapper[4899]: I1003 08:40:41.600089 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:41 crc kubenswrapper[4899]: I1003 08:40:41.600860 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:41 crc kubenswrapper[4899]: I1003 08:40:41.600918 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:41 crc kubenswrapper[4899]: I1003 08:40:41.600932 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:45 crc kubenswrapper[4899]: E1003 08:40:45.314784 4899 controller.go:145] "Failed to ensure 
lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.318338 4899 trace.go:236] Trace[793240628]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Oct-2025 08:40:33.373) (total time: 11944ms): Oct 03 08:40:45 crc kubenswrapper[4899]: Trace[793240628]: ---"Objects listed" error: 11942ms (08:40:45.316) Oct 03 08:40:45 crc kubenswrapper[4899]: Trace[793240628]: [11.944279947s] [11.944279947s] END Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.318399 4899 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Oct 03 08:40:45 crc kubenswrapper[4899]: E1003 08:40:45.320421 4899 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.320582 4899 trace.go:236] Trace[1614072453]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Oct-2025 08:40:32.495) (total time: 12825ms): Oct 03 08:40:45 crc kubenswrapper[4899]: Trace[1614072453]: ---"Objects listed" error: 12825ms (08:40:45.320) Oct 03 08:40:45 crc kubenswrapper[4899]: Trace[1614072453]: [12.825419058s] [12.825419058s] END Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.320613 4899 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.320910 4899 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.322618 4899 trace.go:236] Trace[2060868982]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Oct-2025 08:40:33.300) (total time: 12021ms): Oct 03 08:40:45 crc kubenswrapper[4899]: Trace[2060868982]: ---"Objects listed" error: 12021ms (08:40:45.322) Oct 03 08:40:45 crc kubenswrapper[4899]: Trace[2060868982]: [12.021698509s] [12.021698509s] END Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.322641 4899 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.322844 4899 trace.go:236] Trace[1823315740]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Oct-2025 08:40:35.124) (total time: 10198ms): Oct 03 08:40:45 crc kubenswrapper[4899]: Trace[1823315740]: ---"Objects listed" error: 10197ms (08:40:45.322) Oct 03 08:40:45 crc kubenswrapper[4899]: Trace[1823315740]: [10.198154036s] [10.198154036s] END Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.322981 4899 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.380347 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.385043 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.477456 4899 apiserver.go:52] "Watching apiserver" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.480220 4899 
reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.480678 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h"] Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.481185 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.481245 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.481294 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 08:40:45 crc kubenswrapper[4899]: E1003 08:40:45.481296 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.481374 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 08:40:45 crc kubenswrapper[4899]: E1003 08:40:45.481382 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.481698 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.481766 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 08:40:45 crc kubenswrapper[4899]: E1003 08:40:45.481802 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.484829 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.484941 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.485127 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.485142 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.485358 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.485382 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.486920 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.486929 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.488344 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.488620 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-s9sv8"] Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.488992 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-s9sv8" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.490801 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.491350 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.491784 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.513682 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.522175 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.522216 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.522237 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.522261 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.522278 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.522297 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.522316 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.522354 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.522370 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.522385 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.522408 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.522428 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.522446 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.522500 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.522771 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " 
pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 08:40:45 crc kubenswrapper[4899]: E1003 08:40:45.523069 4899 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 08:40:45 crc kubenswrapper[4899]: E1003 08:40:45.523137 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 08:40:46.023117942 +0000 UTC m=+20.130602895 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 08:40:45 crc kubenswrapper[4899]: E1003 08:40:45.523160 4899 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.523188 4899 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.523201 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 08:40:45 crc kubenswrapper[4899]: E1003 08:40:45.523232 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 08:40:46.023217556 +0000 UTC m=+20.130702509 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.524493 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.526526 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.527557 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.527762 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 08:40:45 crc kubenswrapper[4899]: E1003 08:40:45.535911 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 08:40:45 crc kubenswrapper[4899]: E1003 08:40:45.535947 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 08:40:45 crc kubenswrapper[4899]: E1003 08:40:45.535960 4899 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:40:45 crc kubenswrapper[4899]: E1003 08:40:45.536021 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 08:40:46.036000895 +0000 UTC m=+20.143485838 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.540572 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.542091 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.545688 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.547292 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:45 crc kubenswrapper[4899]: E1003 08:40:45.553612 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 08:40:45 crc kubenswrapper[4899]: E1003 08:40:45.553665 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 08:40:45 crc kubenswrapper[4899]: E1003 08:40:45.553681 4899 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:40:45 crc kubenswrapper[4899]: E1003 08:40:45.553746 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 08:40:46.053726832 +0000 UTC m=+20.161211785 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.560034 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.571165 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.581763 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.582014 4899 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.590538 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.601289 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.613083 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:45 crc kubenswrapper[4899]: E1003 08:40:45.619029 4899 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.619774 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for 
pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623193 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623237 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623278 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623301 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623324 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623346 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623370 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623391 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623413 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 03 
08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623434 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623454 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623475 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623496 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623521 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623543 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623565 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623587 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623623 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623646 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod 
\"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623666 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623685 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623707 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623790 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623817 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623838 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623861 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623882 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623923 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623946 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: 
\"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623973 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.623995 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624017 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624040 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624060 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624080 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624117 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624149 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624169 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624190 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" 
(UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624213 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624233 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624301 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624323 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624343 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624368 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624389 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624410 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624430 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624451 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624471 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624492 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624514 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624537 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624584 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624608 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624665 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624692 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624714 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624735 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624762 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624786 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624808 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624830 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624850 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624874 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624911 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624949 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624968 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624987 4899 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625007 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625027 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625047 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625073 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625094 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625113 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625132 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625150 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625169 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625190 4899 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625213 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625236 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625256 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625279 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625302 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625323 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625345 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625367 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625390 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 
08:40:45.625424 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625447 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625472 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625496 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625517 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625539 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625561 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625582 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625604 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625628 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: 
\"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625653 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625676 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625697 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625720 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625743 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625766 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625791 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625815 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625837 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625859 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") 
pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625883 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625940 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625964 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625983 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626006 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626108 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626134 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626159 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626185 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626208 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626231 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626256 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626280 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626306 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626328 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626352 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626375 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626399 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626424 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626451 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626474 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626500 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626525 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626550 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626581 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626606 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626627 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626653 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626678 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626701 
4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626724 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626749 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626770 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626799 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626825 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626853 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626942 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624205 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.628101 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624715 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.624884 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625052 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625175 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625294 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.628269 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625414 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625589 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625614 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.625992 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626156 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626347 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626359 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626369 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626632 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626674 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626722 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626759 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626766 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626796 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626830 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626834 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626943 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.627012 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.627089 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.627274 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.627288 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.627303 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.627367 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.627466 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.627480 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.627619 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.627648 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.627650 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.627685 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.627719 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.628453 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). 
InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.627791 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.627845 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.627869 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.627946 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.628001 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.628047 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.628088 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.628236 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.628380 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.628466 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.628475 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.628587 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.628637 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.628683 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.628734 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.628739 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.626970 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.628932 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.628959 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629020 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629040 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629048 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629073 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629095 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629115 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629135 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629157 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629177 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629188 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629201 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629220 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629264 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629285 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629304 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629309 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629322 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629324 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629340 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629357 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629374 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629393 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629415 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629437 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629456 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629476 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629491 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629521 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629537 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629552 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629573 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629590 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629609 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629620 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629625 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629672 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629702 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629726 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629751 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629777 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629803 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629827 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629850 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629876 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: 
\"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629922 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629944 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629967 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.629987 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630008 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630031 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630052 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630068 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630082 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630097 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 03 08:40:45 crc 
kubenswrapper[4899]: I1003 08:40:45.630113 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630258 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/ab573102-ad81-4abc-ad0d-c002c08bc84c-hosts-file\") pod \"node-resolver-s9sv8\" (UID: \"ab573102-ad81-4abc-ad0d-c002c08bc84c\") " pod="openshift-dns/node-resolver-s9sv8" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630278 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5szw\" (UniqueName: \"kubernetes.io/projected/ab573102-ad81-4abc-ad0d-c002c08bc84c-kube-api-access-g5szw\") pod \"node-resolver-s9sv8\" (UID: \"ab573102-ad81-4abc-ad0d-c002c08bc84c\") " pod="openshift-dns/node-resolver-s9sv8" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630327 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630343 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630395 4899 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630406 4899 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630528 4899 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630537 4899 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630547 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630555 4899 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 
08:40:45.630566 4899 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630575 4899 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630584 4899 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630593 4899 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630602 4899 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630622 4899 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630633 4899 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630643 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630652 4899 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630661 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630671 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630681 4899 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630689 4899 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 
crc kubenswrapper[4899]: I1003 08:40:45.630701 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630716 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630728 4899 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630738 4899 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630747 4899 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630756 4899 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630765 4899 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630775 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630784 4899 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630793 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630802 4899 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630811 4899 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630821 4899 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630830 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630841 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630849 4899 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630858 4899 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630868 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630877 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630886 4899 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630918 4899 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630931 4899 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630943 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630955 4899 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630967 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630980 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: 
\"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.630994 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.631005 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.631016 4899 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.631026 4899 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.631035 4899 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.631043 4899 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.631054 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.631103 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.631113 4899 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.631122 4899 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.631132 4899 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.631142 4899 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.631154 4899 reconciler_common.go:293] "Volume detached 
for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.631167 4899 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.631178 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.631190 4899 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.631202 4899 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.631305 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.632005 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.632236 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.632312 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.632469 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.632495 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.632625 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.632730 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.632927 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: E1003 08:40:45.632958 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:40:46.132937789 +0000 UTC m=+20.240422842 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.632983 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.633087 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). 
InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.633165 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.633173 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.633266 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.633393 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.633459 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.633575 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.633762 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.633775 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.633836 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.633971 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.634635 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.634748 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.634782 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.634824 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.635214 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.635521 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.635622 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.635945 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.636078 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.636472 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.637168 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.637187 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.637438 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). 
InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.637499 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.637591 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.637637 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.637719 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.637732 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.637776 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.637800 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.637880 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.638123 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.638485 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.638501 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.638545 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.638837 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.638925 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.639134 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). 
InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.638949 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.638952 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.639001 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.639211 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.639237 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.639244 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.639290 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.639302 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.639398 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.639510 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.639521 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.639552 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.639737 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.639845 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.640836 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.641288 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). 
InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.641609 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.641661 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.641918 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.641938 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.642102 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.642204 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.642242 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.642450 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.642835 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.642971 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.643046 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.643293 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.643681 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.643738 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.643738 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.643766 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.643796 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.644107 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.644176 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.644226 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.644261 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.644289 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.644477 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.644481 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.644700 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.645065 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.645086 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.645068 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.645395 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.645546 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.645687 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). 
InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.645809 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.645846 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.645757 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.646122 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.646000 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.646012 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.646298 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.646673 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.646691 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.646939 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.646978 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.647005 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.647021 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.647041 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.648608 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.648665 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.648720 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.649240 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.649732 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.649987 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.650065 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.650096 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.650295 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.651730 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.653337 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.653451 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.654038 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.654276 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.654410 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.654550 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.656886 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). 
InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.657229 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.658341 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.679963 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.684192 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.690187 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.693840 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.719983 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.720582 4899 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.720633 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.724040 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.727161 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.728673 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.731561 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/ab573102-ad81-4abc-ad0d-c002c08bc84c-hosts-file\") pod \"node-resolver-s9sv8\" (UID: \"ab573102-ad81-4abc-ad0d-c002c08bc84c\") " pod="openshift-dns/node-resolver-s9sv8" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.731599 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5szw\" (UniqueName: \"kubernetes.io/projected/ab573102-ad81-4abc-ad0d-c002c08bc84c-kube-api-access-g5szw\") pod \"node-resolver-s9sv8\" (UID: \"ab573102-ad81-4abc-ad0d-c002c08bc84c\") " pod="openshift-dns/node-resolver-s9sv8" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.731711 4899 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.731717 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/ab573102-ad81-4abc-ad0d-c002c08bc84c-hosts-file\") pod \"node-resolver-s9sv8\" (UID: \"ab573102-ad81-4abc-ad0d-c002c08bc84c\") " pod="openshift-dns/node-resolver-s9sv8" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.731725 4899 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.731815 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.731834 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.731848 4899 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.731868 4899 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.731880 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.731917 4899 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath 
\"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.731931 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.731948 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.731960 4899 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.731972 4899 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.731984 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732000 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732012 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732024 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732040 4899 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732054 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732066 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732079 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732093 4899 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc 
kubenswrapper[4899]: I1003 08:40:45.732105 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732117 4899 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732162 4899 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732184 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732193 4899 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732203 4899 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732215 4899 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732230 4899 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732240 4899 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732249 4899 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732261 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732271 4899 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732279 4899 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 
crc kubenswrapper[4899]: I1003 08:40:45.732287 4899 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732298 4899 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732321 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732331 4899 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732343 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732352 4899 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732360 4899 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732368 4899 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732378 4899 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732387 4899 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732395 4899 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732403 4899 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732414 4899 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 
08:40:45.732422 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732431 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732441 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732450 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732458 4899 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732467 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732490 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732499 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732508 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732517 4899 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732528 4899 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732536 4899 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732544 4899 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: 
I1003 08:40:45.732553 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732563 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732571 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732580 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732590 4899 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732600 4899 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.732691 4899 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733341 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733360 4899 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733372 4899 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733385 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733397 4899 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733429 4899 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733441 4899 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733454 4899 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733465 4899 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733476 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733487 4899 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733519 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733532 4899 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733597 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733610 4899 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733622 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733635 4899 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733647 4899 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733659 4899 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733672 4899 reconciler_common.go:293] 
"Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733683 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733694 4899 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733705 4899 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733742 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733753 4899 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733764 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733776 4899 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733788 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733799 4899 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733811 4899 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733822 4899 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733837 4899 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: 
I1003 08:40:45.733848 4899 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733859 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733869 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733880 4899 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733913 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733926 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733937 4899 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733947 4899 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733959 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733971 4899 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733982 4899 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.733993 4899 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.734004 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.734014 4899 reconciler_common.go:293] 
"Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.734024 4899 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.734034 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.734044 4899 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.734055 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.734067 4899 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.734077 4899 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.734087 4899 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.734099 4899 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.734110 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.734120 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.734131 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.734150 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.734161 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on 
node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.734290 4899 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.734302 4899 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.734313 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.734325 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.734356 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.734369 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.737604 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.745329 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.747989 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5szw\" (UniqueName: \"kubernetes.io/projected/ab573102-ad81-4abc-ad0d-c002c08bc84c-kube-api-access-g5szw\") pod \"node-resolver-s9sv8\" (UID: \"ab573102-ad81-4abc-ad0d-c002c08bc84c\") " pod="openshift-dns/node-resolver-s9sv8" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.755967 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.765591 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.775367 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.784501 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.796329 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.803354 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.806510 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 08:40:45 crc kubenswrapper[4899]: W1003 08:40:45.808752 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-d0ed74aafbbfe06a46896336681982cbe3bfd704f87738df3c3a6ac31195f416 WatchSource:0}: Error finding container d0ed74aafbbfe06a46896336681982cbe3bfd704f87738df3c3a6ac31195f416: Status 404 returned error can't find the container with id d0ed74aafbbfe06a46896336681982cbe3bfd704f87738df3c3a6ac31195f416 Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.813649 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.819121 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-s9sv8" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.824651 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.850182 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.870437 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:45 crc kubenswrapper[4899]: W1003 08:40:45.882563 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-cc14d30eeb1a444766af92751c1256d43e91b7a9be3479785fde0a42d119b099 WatchSource:0}: Error finding container cc14d30eeb1a444766af92751c1256d43e91b7a9be3479785fde0a42d119b099: Status 404 returned error can't find the container with id cc14d30eeb1a444766af92751c1256d43e91b7a9be3479785fde0a42d119b099 Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.886130 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.896448 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.912204 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.928144 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.939964 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:45 crc kubenswrapper[4899]: I1003 08:40:45.951440 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.040233 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.040292 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.040319 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:40:46 crc kubenswrapper[4899]: E1003 08:40:46.040367 4899 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 08:40:46 crc kubenswrapper[4899]: E1003 08:40:46.040410 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 08:40:46 crc kubenswrapper[4899]: E1003 08:40:46.040427 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 08:40:46 crc kubenswrapper[4899]: E1003 08:40:46.040439 4899 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:40:46 crc kubenswrapper[4899]: E1003 08:40:46.040440 4899 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 08:40:47.040418845 +0000 UTC m=+21.147903798 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 08:40:46 crc kubenswrapper[4899]: E1003 08:40:46.040481 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 08:40:47.040471187 +0000 UTC m=+21.147956140 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:40:46 crc kubenswrapper[4899]: E1003 08:40:46.040482 4899 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 08:40:46 crc kubenswrapper[4899]: E1003 08:40:46.040514 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 08:40:47.040505808 +0000 UTC m=+21.147990761 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.140824 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.140925 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:40:46 crc kubenswrapper[4899]: E1003 08:40:46.141059 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 08:40:46 crc kubenswrapper[4899]: E1003 08:40:46.141074 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 08:40:46 crc kubenswrapper[4899]: E1003 08:40:46.141083 4899 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:40:46 crc kubenswrapper[4899]: E1003 08:40:46.141127 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 08:40:47.141114505 +0000 UTC m=+21.248599458 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:40:46 crc kubenswrapper[4899]: E1003 08:40:46.141174 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:40:47.141169167 +0000 UTC m=+21.248654120 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.353585 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-jpgn4"] Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.354525 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-jpgn4" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.360952 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.369608 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.369777 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.370035 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.386868 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.406598 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.421876 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.436466 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.443161 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/4478ebbd-6973-4ba3-a95a-311406b51cdf-serviceca\") pod \"node-ca-jpgn4\" (UID: \"4478ebbd-6973-4ba3-a95a-311406b51cdf\") " pod="openshift-image-registry/node-ca-jpgn4" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.443197 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7vb9\" (UniqueName: \"kubernetes.io/projected/4478ebbd-6973-4ba3-a95a-311406b51cdf-kube-api-access-v7vb9\") pod \"node-ca-jpgn4\" (UID: \"4478ebbd-6973-4ba3-a95a-311406b51cdf\") " pod="openshift-image-registry/node-ca-jpgn4" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.443259 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4478ebbd-6973-4ba3-a95a-311406b51cdf-host\") pod \"node-ca-jpgn4\" (UID: \"4478ebbd-6973-4ba3-a95a-311406b51cdf\") " pod="openshift-image-registry/node-ca-jpgn4" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.448758 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.459755 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.479187 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.491372 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.501770 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.515071 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.541809 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.542492 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.543776 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.544439 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.544820 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4478ebbd-6973-4ba3-a95a-311406b51cdf-host\") pod \"node-ca-jpgn4\" (UID: \"4478ebbd-6973-4ba3-a95a-311406b51cdf\") " pod="openshift-image-registry/node-ca-jpgn4" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.544878 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/4478ebbd-6973-4ba3-a95a-311406b51cdf-serviceca\") pod \"node-ca-jpgn4\" (UID: \"4478ebbd-6973-4ba3-a95a-311406b51cdf\") " pod="openshift-image-registry/node-ca-jpgn4" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.544956 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7vb9\" (UniqueName: \"kubernetes.io/projected/4478ebbd-6973-4ba3-a95a-311406b51cdf-kube-api-access-v7vb9\") pod \"node-ca-jpgn4\" (UID: \"4478ebbd-6973-4ba3-a95a-311406b51cdf\") " pod="openshift-image-registry/node-ca-jpgn4" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.545350 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4478ebbd-6973-4ba3-a95a-311406b51cdf-host\") pod \"node-ca-jpgn4\" (UID: \"4478ebbd-6973-4ba3-a95a-311406b51cdf\") " pod="openshift-image-registry/node-ca-jpgn4" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.545502 4899 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.546098 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.546520 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/4478ebbd-6973-4ba3-a95a-311406b51cdf-serviceca\") pod \"node-ca-jpgn4\" (UID: \"4478ebbd-6973-4ba3-a95a-311406b51cdf\") " pod="openshift-image-registry/node-ca-jpgn4" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.546815 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.547620 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.547868 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.548680 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.549793 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.550287 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.552269 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.552761 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.553348 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.554394 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.554911 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.557427 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.557931 4899 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.558467 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.559490 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.559935 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.560395 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.562218 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.562636 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.563661 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.564124 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.564726 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.566048 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7vb9\" (UniqueName: \"kubernetes.io/projected/4478ebbd-6973-4ba3-a95a-311406b51cdf-kube-api-access-v7vb9\") pod \"node-ca-jpgn4\" (UID: \"4478ebbd-6973-4ba3-a95a-311406b51cdf\") " pod="openshift-image-registry/node-ca-jpgn4" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.567473 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.567954 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.576141 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.577046 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.577350 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\"
:{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.578259 4899 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" 
path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.578464 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.580402 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.581547 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.582125 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.584237 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.585642 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.586385 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.587682 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.588619 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.589043 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.589476 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.590841 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.592179 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.592969 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.593972 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.594776 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.595870 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.596735 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.597761 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: 
I1003 08:40:46.598474 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.599209 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.600426 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.600416 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.601407 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.602794 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.616524 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97"} Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.616596 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"2a5e6e2b7719ac3cdc52a5a9b41f3da0c54217581784dda379f8dd1a5022e9b5"} Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.618951 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-s9sv8" event={"ID":"ab573102-ad81-4abc-ad0d-c002c08bc84c","Type":"ContainerStarted","Data":"dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81"} Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.618982 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-s9sv8" event={"ID":"ab573102-ad81-4abc-ad0d-c002c08bc84c","Type":"ContainerStarted","Data":"f74e54905b43ac802d394d1ce4f5fc2c1f11808b1ef37e7e30fe8ecac353b4a9"} Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.620593 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.624261 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce"} Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.624472 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2"} Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.624547 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"d0ed74aafbbfe06a46896336681982cbe3bfd704f87738df3c3a6ac31195f416"} Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.626864 4899 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.629958 4899 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a" exitCode=255 Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.630050 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a"} Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.630586 4899 scope.go:117] "RemoveContainer" containerID="a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.631376 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"cc14d30eeb1a444766af92751c1256d43e91b7a9be3479785fde0a42d119b099"} Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.636231 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.652194 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.669166 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.675631 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-jpgn4" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.695725 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.708858 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.723820 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03
T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.740249 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.756164 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.772473 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.782143 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.797408 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.812963 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.821574 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-pgdhq"] Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.821922 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-pgdhq" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.824579 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.824757 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.824917 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.825068 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.825262 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.826131 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.840745 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.855242 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.867293 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.892850 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.907247 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.923922 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.934343 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.947516 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-host-run-multus-certs\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.947582 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-host-run-netns\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.947606 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-cnibin\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.947626 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-hostroot\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.947647 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-multus-cni-dir\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.947666 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-os-release\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.947686 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-host-var-lib-kubelet\") pod \"multus-pgdhq\" (UID: 
\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.947703 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.947788 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-etc-kubernetes\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.947854 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-multus-conf-dir\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.947991 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/6f75d8d8-3b12-42bf-b447-0afb4413fd54-multus-daemon-config\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " 
pod="openshift-multus/multus-pgdhq" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.948057 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-host-var-lib-cni-multus\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.948088 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxvgr\" (UniqueName: \"kubernetes.io/projected/6f75d8d8-3b12-42bf-b447-0afb4413fd54-kube-api-access-kxvgr\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.948108 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-multus-socket-dir-parent\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.948164 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6f75d8d8-3b12-42bf-b447-0afb4413fd54-cni-binary-copy\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.948183 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-host-run-k8s-cni-cncf-io\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.948212 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-system-cni-dir\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.948245 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-host-var-lib-cni-bin\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.964460 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:46 crc kubenswrapper[4899]: I1003 08:40:46.978677 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03
T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.001121 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.016601 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:47Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.049004 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-multus-conf-dir\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.049053 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/6f75d8d8-3b12-42bf-b447-0afb4413fd54-multus-daemon-config\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.049078 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.049104 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-host-var-lib-cni-multus\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.049126 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxvgr\" (UniqueName: \"kubernetes.io/projected/6f75d8d8-3b12-42bf-b447-0afb4413fd54-kube-api-access-kxvgr\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.049146 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.049173 
4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-multus-conf-dir\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.049220 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6f75d8d8-3b12-42bf-b447-0afb4413fd54-cni-binary-copy\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: E1003 08:40:47.049247 4899 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 08:40:47 crc kubenswrapper[4899]: E1003 08:40:47.049315 4899 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 08:40:47 crc kubenswrapper[4899]: E1003 08:40:47.049321 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 08:40:49.049302937 +0000 UTC m=+23.156787960 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 08:40:47 crc kubenswrapper[4899]: E1003 08:40:47.049451 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 08:40:49.04940247 +0000 UTC m=+23.156887513 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.049494 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-multus-socket-dir-parent\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.049552 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-host-run-k8s-cni-cncf-io\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.049582 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-system-cni-dir\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.049607 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-host-var-lib-cni-bin\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.049637 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-host-run-netns\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.049647 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-host-run-k8s-cni-cncf-io\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.049661 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-host-run-multus-certs\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.049668 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-multus-socket-dir-parent\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.049709 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-host-var-lib-cni-bin\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: E1003 08:40:47.049775 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 08:40:47 crc kubenswrapper[4899]: E1003 08:40:47.049798 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.049712 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-host-run-netns\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.049694 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-host-run-multus-certs\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.049703 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.049800 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-system-cni-dir\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: E1003 08:40:47.049813 4899 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.049949 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-cnibin\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: E1003 08:40:47.049949 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 08:40:49.04993615 +0000 UTC m=+23.157421313 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.049880 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-cnibin\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.050004 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-multus-cni-dir\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.050027 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-os-release\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.050049 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-host-var-lib-kubelet\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.050070 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-hostroot\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.050091 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-etc-kubernetes\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.050148 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-etc-kubernetes\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.050151 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-host-var-lib-kubelet\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.050170 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-hostroot\") pod 
\"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.050193 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-os-release\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.050198 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-multus-cni-dir\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.050496 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/6f75d8d8-3b12-42bf-b447-0afb4413fd54-multus-daemon-config\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.049306 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/6f75d8d8-3b12-42bf-b447-0afb4413fd54-host-var-lib-cni-multus\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.050618 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6f75d8d8-3b12-42bf-b447-0afb4413fd54-cni-binary-copy\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.078345 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxvgr\" (UniqueName: \"kubernetes.io/projected/6f75d8d8-3b12-42bf-b447-0afb4413fd54-kube-api-access-kxvgr\") pod \"multus-pgdhq\" (UID: \"6f75d8d8-3b12-42bf-b447-0afb4413fd54\") " pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.136714 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-pgdhq" Oct 03 08:40:47 crc kubenswrapper[4899]: W1003 08:40:47.147263 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6f75d8d8_3b12_42bf_b447_0afb4413fd54.slice/crio-50e760d848b279970f0a2bd5696d7d32e4c70f01fb0fc9c9524879f231e55a79 WatchSource:0}: Error finding container 50e760d848b279970f0a2bd5696d7d32e4c70f01fb0fc9c9524879f231e55a79: Status 404 returned error can't find the container with id 50e760d848b279970f0a2bd5696d7d32e4c70f01fb0fc9c9524879f231e55a79 Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.150487 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.150579 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:40:47 crc kubenswrapper[4899]: E1003 08:40:47.150638 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:40:49.150611618 +0000 UTC m=+23.258096631 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:40:47 crc kubenswrapper[4899]: E1003 08:40:47.150783 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 08:40:47 crc kubenswrapper[4899]: E1003 08:40:47.150806 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 08:40:47 crc kubenswrapper[4899]: E1003 08:40:47.150818 4899 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:40:47 crc kubenswrapper[4899]: E1003 08:40:47.150867 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 08:40:49.150851817 +0000 UTC m=+23.258336820 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.206261 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-wxhwc"] Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.208324 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-t2h4g"] Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.208742 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.208793 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-g7f7c"] Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.209959 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.210628 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.212458 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.212468 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.214308 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.214557 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.214852 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.215482 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.215633 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.215837 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.216010 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.216125 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.216195 4899 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.216194 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.216298 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.216470 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.229542 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:47Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.241207 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:47Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.265015 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:47Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.305394 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:47Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.352760 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3e8a7198-81da-475c-ac88-a460ba4064d1-proxy-tls\") pod \"machine-config-daemon-t2h4g\" (UID: \"3e8a7198-81da-475c-ac88-a460ba4064d1\") " pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.352828 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-run-ovn-kubernetes\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.352852 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/3e8a7198-81da-475c-ac88-a460ba4064d1-mcd-auth-proxy-config\") pod \"machine-config-daemon-t2h4g\" (UID: \"3e8a7198-81da-475c-ac88-a460ba4064d1\") " pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.353044 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-cni-bin\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.353098 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.353131 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twt24\" (UniqueName: \"kubernetes.io/projected/0d4e5d0f-c610-483a-a7e0-92c39dce1b12-kube-api-access-twt24\") pod \"multus-additional-cni-plugins-wxhwc\" (UID: \"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\") " pod="openshift-multus/multus-additional-cni-plugins-wxhwc" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.353156 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-systemd-units\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.353176 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-run-systemd\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.353220 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-run-ovn\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.353255 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-node-log\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.353306 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/764a7341-6f52-4fc1-9086-87b90aa126e8-ovnkube-script-lib\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc 
kubenswrapper[4899]: I1003 08:40:47.353330 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-var-lib-openvswitch\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.353368 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/0d4e5d0f-c610-483a-a7e0-92c39dce1b12-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-wxhwc\" (UID: \"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\") " pod="openshift-multus/multus-additional-cni-plugins-wxhwc" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.353389 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-run-openvswitch\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.353453 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/0d4e5d0f-c610-483a-a7e0-92c39dce1b12-cnibin\") pod \"multus-additional-cni-plugins-wxhwc\" (UID: \"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\") " pod="openshift-multus/multus-additional-cni-plugins-wxhwc" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.353483 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-run-netns\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.353522 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-kubelet\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.353548 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0d4e5d0f-c610-483a-a7e0-92c39dce1b12-system-cni-dir\") pod \"multus-additional-cni-plugins-wxhwc\" (UID: \"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\") " pod="openshift-multus/multus-additional-cni-plugins-wxhwc" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.353568 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-etc-openvswitch\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.353619 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: 
\"kubernetes.io/host-path/0d4e5d0f-c610-483a-a7e0-92c39dce1b12-tuning-conf-dir\") pod \"multus-additional-cni-plugins-wxhwc\" (UID: \"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\") " pod="openshift-multus/multus-additional-cni-plugins-wxhwc" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.353640 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7fgv\" (UniqueName: \"kubernetes.io/projected/764a7341-6f52-4fc1-9086-87b90aa126e8-kube-api-access-s7fgv\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.353679 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3e8a7198-81da-475c-ac88-a460ba4064d1-rootfs\") pod \"machine-config-daemon-t2h4g\" (UID: \"3e8a7198-81da-475c-ac88-a460ba4064d1\") " pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.353711 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-slash\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.353730 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-cni-netd\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.353768 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/764a7341-6f52-4fc1-9086-87b90aa126e8-ovn-node-metrics-cert\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.353804 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqmnb\" (UniqueName: \"kubernetes.io/projected/3e8a7198-81da-475c-ac88-a460ba4064d1-kube-api-access-lqmnb\") pod \"machine-config-daemon-t2h4g\" (UID: \"3e8a7198-81da-475c-ac88-a460ba4064d1\") " pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.353861 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/0d4e5d0f-c610-483a-a7e0-92c39dce1b12-os-release\") pod \"multus-additional-cni-plugins-wxhwc\" (UID: \"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\") " pod="openshift-multus/multus-additional-cni-plugins-wxhwc" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.353880 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/764a7341-6f52-4fc1-9086-87b90aa126e8-ovnkube-config\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 
08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.353923 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/0d4e5d0f-c610-483a-a7e0-92c39dce1b12-cni-binary-copy\") pod \"multus-additional-cni-plugins-wxhwc\" (UID: \"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\") " pod="openshift-multus/multus-additional-cni-plugins-wxhwc" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.353943 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-log-socket\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.353973 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/764a7341-6f52-4fc1-9086-87b90aa126e8-env-overrides\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.363549 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03
T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:47Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.404169 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:47Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.424599 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:47Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455183 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/0d4e5d0f-c610-483a-a7e0-92c39dce1b12-cni-binary-copy\") pod \"multus-additional-cni-plugins-wxhwc\" (UID: \"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\") " pod="openshift-multus/multus-additional-cni-plugins-wxhwc" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455227 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-log-socket\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455250 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/764a7341-6f52-4fc1-9086-87b90aa126e8-env-overrides\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455268 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3e8a7198-81da-475c-ac88-a460ba4064d1-proxy-tls\") pod \"machine-config-daemon-t2h4g\" (UID: \"3e8a7198-81da-475c-ac88-a460ba4064d1\") " pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455283 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-run-ovn-kubernetes\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455300 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3e8a7198-81da-475c-ac88-a460ba4064d1-mcd-auth-proxy-config\") pod \"machine-config-daemon-t2h4g\" (UID: \"3e8a7198-81da-475c-ac88-a460ba4064d1\") " pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 
08:40:47.455320 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-cni-bin\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455335 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455351 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twt24\" (UniqueName: \"kubernetes.io/projected/0d4e5d0f-c610-483a-a7e0-92c39dce1b12-kube-api-access-twt24\") pod \"multus-additional-cni-plugins-wxhwc\" (UID: \"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\") " pod="openshift-multus/multus-additional-cni-plugins-wxhwc" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455366 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-systemd-units\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455379 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-run-systemd\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455392 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-run-ovn\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455407 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-node-log\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455421 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/764a7341-6f52-4fc1-9086-87b90aa126e8-ovnkube-script-lib\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455438 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-var-lib-openvswitch\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455452 4899 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/0d4e5d0f-c610-483a-a7e0-92c39dce1b12-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-wxhwc\" (UID: \"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\") " pod="openshift-multus/multus-additional-cni-plugins-wxhwc" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455466 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-run-openvswitch\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455483 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/0d4e5d0f-c610-483a-a7e0-92c39dce1b12-cnibin\") pod \"multus-additional-cni-plugins-wxhwc\" (UID: \"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\") " pod="openshift-multus/multus-additional-cni-plugins-wxhwc" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455499 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-run-netns\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455512 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-kubelet\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455526 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0d4e5d0f-c610-483a-a7e0-92c39dce1b12-system-cni-dir\") pod \"multus-additional-cni-plugins-wxhwc\" (UID: \"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\") " pod="openshift-multus/multus-additional-cni-plugins-wxhwc" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455540 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-etc-openvswitch\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455554 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/0d4e5d0f-c610-483a-a7e0-92c39dce1b12-tuning-conf-dir\") pod \"multus-additional-cni-plugins-wxhwc\" (UID: \"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\") " pod="openshift-multus/multus-additional-cni-plugins-wxhwc" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455581 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7fgv\" (UniqueName: \"kubernetes.io/projected/764a7341-6f52-4fc1-9086-87b90aa126e8-kube-api-access-s7fgv\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 
08:40:47.455597 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3e8a7198-81da-475c-ac88-a460ba4064d1-rootfs\") pod \"machine-config-daemon-t2h4g\" (UID: \"3e8a7198-81da-475c-ac88-a460ba4064d1\") " pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455611 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-slash\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455626 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-cni-netd\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455642 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/764a7341-6f52-4fc1-9086-87b90aa126e8-ovn-node-metrics-cert\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455656 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqmnb\" (UniqueName: \"kubernetes.io/projected/3e8a7198-81da-475c-ac88-a460ba4064d1-kube-api-access-lqmnb\") pod \"machine-config-daemon-t2h4g\" (UID: \"3e8a7198-81da-475c-ac88-a460ba4064d1\") " pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455684 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/0d4e5d0f-c610-483a-a7e0-92c39dce1b12-os-release\") pod \"multus-additional-cni-plugins-wxhwc\" (UID: \"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\") " pod="openshift-multus/multus-additional-cni-plugins-wxhwc" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.455699 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/764a7341-6f52-4fc1-9086-87b90aa126e8-ovnkube-config\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.456351 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/764a7341-6f52-4fc1-9086-87b90aa126e8-ovnkube-config\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.456809 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/0d4e5d0f-c610-483a-a7e0-92c39dce1b12-cni-binary-copy\") pod \"multus-additional-cni-plugins-wxhwc\" (UID: \"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\") " pod="openshift-multus/multus-additional-cni-plugins-wxhwc" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.456850 
4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-log-socket\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.457138 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/764a7341-6f52-4fc1-9086-87b90aa126e8-env-overrides\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.457539 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-run-ovn-kubernetes\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.457806 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/0d4e5d0f-c610-483a-a7e0-92c39dce1b12-cnibin\") pod \"multus-additional-cni-plugins-wxhwc\" (UID: \"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\") " pod="openshift-multus/multus-additional-cni-plugins-wxhwc" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.457863 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-cni-bin\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.457906 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.457921 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-var-lib-openvswitch\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.457952 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-node-log\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.457986 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0d4e5d0f-c610-483a-a7e0-92c39dce1b12-system-cni-dir\") pod \"multus-additional-cni-plugins-wxhwc\" (UID: \"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\") " pod="openshift-multus/multus-additional-cni-plugins-wxhwc" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.457985 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"run-ovn\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-run-ovn\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.458034 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-etc-openvswitch\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.458062 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-run-openvswitch\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.458053 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-systemd-units\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.458130 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/0d4e5d0f-c610-483a-a7e0-92c39dce1b12-tuning-conf-dir\") pod \"multus-additional-cni-plugins-wxhwc\" (UID: \"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\") " pod="openshift-multus/multus-additional-cni-plugins-wxhwc" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.458041 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-kubelet\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.458019 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-run-netns\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.458103 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-slash\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.458118 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-run-systemd\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.458123 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3e8a7198-81da-475c-ac88-a460ba4064d1-rootfs\") pod \"machine-config-daemon-t2h4g\" (UID: 
\"3e8a7198-81da-475c-ac88-a460ba4064d1\") " pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.458066 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-cni-netd\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.458189 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/0d4e5d0f-c610-483a-a7e0-92c39dce1b12-os-release\") pod \"multus-additional-cni-plugins-wxhwc\" (UID: \"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\") " pod="openshift-multus/multus-additional-cni-plugins-wxhwc" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.458629 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/764a7341-6f52-4fc1-9086-87b90aa126e8-ovnkube-script-lib\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.458729 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/0d4e5d0f-c610-483a-a7e0-92c39dce1b12-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-wxhwc\" (UID: \"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\") " pod="openshift-multus/multus-additional-cni-plugins-wxhwc" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.458922 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3e8a7198-81da-475c-ac88-a460ba4064d1-mcd-auth-proxy-config\") pod \"machine-config-daemon-t2h4g\" (UID: \"3e8a7198-81da-475c-ac88-a460ba4064d1\") " pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.461761 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3e8a7198-81da-475c-ac88-a460ba4064d1-proxy-tls\") pod \"machine-config-daemon-t2h4g\" (UID: \"3e8a7198-81da-475c-ac88-a460ba4064d1\") " pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.462253 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/764a7341-6f52-4fc1-9086-87b90aa126e8-ovn-node-metrics-cert\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.468781 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:47Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.494005 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twt24\" (UniqueName: \"kubernetes.io/projected/0d4e5d0f-c610-483a-a7e0-92c39dce1b12-kube-api-access-twt24\") pod \"multus-additional-cni-plugins-wxhwc\" (UID: \"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\") " pod="openshift-multus/multus-additional-cni-plugins-wxhwc" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.514036 
4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7fgv\" (UniqueName: \"kubernetes.io/projected/764a7341-6f52-4fc1-9086-87b90aa126e8-kube-api-access-s7fgv\") pod \"ovnkube-node-g7f7c\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.525018 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.526078 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.526080 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.526125 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:40:47 crc kubenswrapper[4899]: E1003 08:40:47.526289 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:40:47 crc kubenswrapper[4899]: E1003 08:40:47.526372 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:40:47 crc kubenswrapper[4899]: E1003 08:40:47.526504 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:40:47 crc kubenswrapper[4899]: W1003 08:40:47.537757 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0d4e5d0f_c610_483a_a7e0_92c39dce1b12.slice/crio-553c3184e1dc0cf949f68331e9d9b30b60a0e5c9d89af5d0d3d63f0c68552e74 WatchSource:0}: Error finding container 553c3184e1dc0cf949f68331e9d9b30b60a0e5c9d89af5d0d3d63f0c68552e74: Status 404 returned error can't find the container with id 553c3184e1dc0cf949f68331e9d9b30b60a0e5c9d89af5d0d3d63f0c68552e74 Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.538001 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.538832 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqmnb\" (UniqueName: \"kubernetes.io/projected/3e8a7198-81da-475c-ac88-a460ba4064d1-kube-api-access-lqmnb\") pod \"machine-config-daemon-t2h4g\" (UID: \"3e8a7198-81da-475c-ac88-a460ba4064d1\") " pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.564500 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:47Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.611464 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:47Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.642161 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" event={"ID":"764a7341-6f52-4fc1-9086-87b90aa126e8","Type":"ContainerStarted","Data":"adf5c7a5ad44609f6b8e85e4025e2c27f82e46e26d5a3c6f5f840e5339652b80"} Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.646271 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" event={"ID":"0d4e5d0f-c610-483a-a7e0-92c39dce1b12","Type":"ContainerStarted","Data":"553c3184e1dc0cf949f68331e9d9b30b60a0e5c9d89af5d0d3d63f0c68552e74"} Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.649887 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:47Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.650787 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pgdhq" event={"ID":"6f75d8d8-3b12-42bf-b447-0afb4413fd54","Type":"ContainerStarted","Data":"f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f"} Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.650842 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pgdhq" event={"ID":"6f75d8d8-3b12-42bf-b447-0afb4413fd54","Type":"ContainerStarted","Data":"50e760d848b279970f0a2bd5696d7d32e4c70f01fb0fc9c9524879f231e55a79"} Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.654326 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-jpgn4" event={"ID":"4478ebbd-6973-4ba3-a95a-311406b51cdf","Type":"ContainerStarted","Data":"261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa"} Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.654372 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-jpgn4" event={"ID":"4478ebbd-6973-4ba3-a95a-311406b51cdf","Type":"ContainerStarted","Data":"bba6c38b62a6affbc6a9dc34c61de2d525decf4b947728bba28f22dbcf7b309b"} Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.663190 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.670840 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1"} Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.671794 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.698347 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:47Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.730293 4899 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:47Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.765593 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:47Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.810067 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:47Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.832988 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.853000 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:47Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.886872 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:47Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.925174 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:47Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:47 crc kubenswrapper[4899]: I1003 08:40:47.967776 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:47Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.006271 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.052712 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.097485 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.126464 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.165831 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.204450 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.260867 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.676793 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e"} Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.678437 4899 generic.go:334] "Generic (PLEG): container finished" 
podID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerID="965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606" exitCode=0 Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.678506 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" event={"ID":"764a7341-6f52-4fc1-9086-87b90aa126e8","Type":"ContainerDied","Data":"965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606"} Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.680260 4899 generic.go:334] "Generic (PLEG): container finished" podID="0d4e5d0f-c610-483a-a7e0-92c39dce1b12" containerID="8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4" exitCode=0 Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.680355 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" event={"ID":"0d4e5d0f-c610-483a-a7e0-92c39dce1b12","Type":"ContainerDied","Data":"8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4"} Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.682459 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerStarted","Data":"5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d"} Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.682531 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerStarted","Data":"1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395"} Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.682545 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerStarted","Data":"e06af47aceb1c87dce317473f3eedf32061d411f7481b6adc643a059b48ac668"} Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.693746 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.705654 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.717990 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.731319 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.744983 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.754858 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.766383 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.779522 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.792294 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.809136 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.827666 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.841253 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.852971 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.865391 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-03T08:40:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.883483 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",
\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\
\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\
\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.898358 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.927344 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:48 crc kubenswrapper[4899]: I1003 08:40:48.966477 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-03T08:40:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.004961 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.043562 4899 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.073663 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.073719 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.073753 4899 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:40:49 crc kubenswrapper[4899]: E1003 08:40:49.073865 4899 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 08:40:49 crc kubenswrapper[4899]: E1003 08:40:49.073935 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 08:40:53.073919897 +0000 UTC m=+27.181404850 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 08:40:49 crc kubenswrapper[4899]: E1003 08:40:49.074251 4899 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 08:40:49 crc kubenswrapper[4899]: E1003 08:40:49.074289 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 08:40:53.074279749 +0000 UTC m=+27.181764702 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 08:40:49 crc kubenswrapper[4899]: E1003 08:40:49.074396 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 08:40:49 crc kubenswrapper[4899]: E1003 08:40:49.074417 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 08:40:49 crc kubenswrapper[4899]: E1003 08:40:49.074428 4899 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:40:49 crc kubenswrapper[4899]: E1003 08:40:49.074455 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-10-03 08:40:53.074446985 +0000 UTC m=+27.181931938 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.086096 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.124256 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.168874 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:49 crc 
kubenswrapper[4899]: I1003 08:40:49.174454 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.174536 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:40:49 crc kubenswrapper[4899]: E1003 08:40:49.174664 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 08:40:49 crc kubenswrapper[4899]: E1003 08:40:49.174683 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 08:40:49 crc kubenswrapper[4899]: E1003 08:40:49.174693 4899 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:40:49 crc kubenswrapper[4899]: E1003 08:40:49.174730 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 08:40:53.17471855 +0000 UTC m=+27.282203503 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:40:49 crc kubenswrapper[4899]: E1003 08:40:49.174773 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:40:53.174768172 +0000 UTC m=+27.282253125 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.203080 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.251584 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.284945 4899 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.325667 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.365320 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.526043 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:40:49 crc kubenswrapper[4899]: E1003 08:40:49.526157 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.526211 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:40:49 crc kubenswrapper[4899]: E1003 08:40:49.526249 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.526287 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:40:49 crc kubenswrapper[4899]: E1003 08:40:49.526325 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.690595 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" event={"ID":"764a7341-6f52-4fc1-9086-87b90aa126e8","Type":"ContainerStarted","Data":"3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b"} Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.690943 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" event={"ID":"764a7341-6f52-4fc1-9086-87b90aa126e8","Type":"ContainerStarted","Data":"907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad"} Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.690956 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" event={"ID":"764a7341-6f52-4fc1-9086-87b90aa126e8","Type":"ContainerStarted","Data":"265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6"} Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.690965 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" event={"ID":"764a7341-6f52-4fc1-9086-87b90aa126e8","Type":"ContainerStarted","Data":"36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72"} Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.690976 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" event={"ID":"764a7341-6f52-4fc1-9086-87b90aa126e8","Type":"ContainerStarted","Data":"8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68"} Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.690989 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" event={"ID":"764a7341-6f52-4fc1-9086-87b90aa126e8","Type":"ContainerStarted","Data":"e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e"} Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.693714 4899 generic.go:334] "Generic (PLEG): container finished" podID="0d4e5d0f-c610-483a-a7e0-92c39dce1b12" containerID="a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c" exitCode=0 Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.693782 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" event={"ID":"0d4e5d0f-c610-483a-a7e0-92c39dce1b12","Type":"ContainerDied","Data":"a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c"} Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.710859 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.739995 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:49Z 
is after 2025-08-24T17:21:41Z" Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.761012 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.777680 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.792724 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.810085 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.826802 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt
/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disable
d\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.843136 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The 
container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.856737 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\
\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.869644 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.879179 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.892234 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.908775 4899 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:49 crc kubenswrapper[4899]: I1003 08:40:49.924028 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:50 crc kubenswrapper[4899]: I1003 08:40:50.700314 4899 generic.go:334] "Generic (PLEG): container finished" podID="0d4e5d0f-c610-483a-a7e0-92c39dce1b12" containerID="7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb" exitCode=0 Oct 03 08:40:50 crc kubenswrapper[4899]: I1003 08:40:50.700373 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" event={"ID":"0d4e5d0f-c610-483a-a7e0-92c39dce1b12","Type":"ContainerDied","Data":"7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb"} Oct 03 08:40:50 crc kubenswrapper[4899]: I1003 08:40:50.718252 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:50Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:50 crc kubenswrapper[4899]: I1003 08:40:50.734832 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:50Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:50 crc kubenswrapper[4899]: I1003 08:40:50.753018 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-03T08:40:50Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:50 crc kubenswrapper[4899]: I1003 08:40:50.772312 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",
\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\
\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\
\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:50Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:50 crc kubenswrapper[4899]: I1003 08:40:50.786873 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:50Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:50 crc kubenswrapper[4899]: I1003 08:40:50.803035 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:50Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:50 crc kubenswrapper[4899]: I1003 08:40:50.817131 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:50Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:50 crc kubenswrapper[4899]: I1003 08:40:50.832748 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:50Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:50 crc kubenswrapper[4899]: I1003 08:40:50.851974 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:50Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:50 crc kubenswrapper[4899]: I1003 08:40:50.866832 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:50Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:50 crc kubenswrapper[4899]: I1003 08:40:50.880855 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:50Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:50 crc kubenswrapper[4899]: I1003 08:40:50.895988 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:50Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:50 crc kubenswrapper[4899]: I1003 08:40:50.907617 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:50Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:50 crc kubenswrapper[4899]: I1003 08:40:50.921647 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:50Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.525886 4899 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.526026 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:40:51 crc kubenswrapper[4899]: E1003 08:40:51.526146 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.526217 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:40:51 crc kubenswrapper[4899]: E1003 08:40:51.526292 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:40:51 crc kubenswrapper[4899]: E1003 08:40:51.526410 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.706080 4899 generic.go:334] "Generic (PLEG): container finished" podID="0d4e5d0f-c610-483a-a7e0-92c39dce1b12" containerID="ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07" exitCode=0 Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.706119 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" event={"ID":"0d4e5d0f-c610-483a-a7e0-92c39dce1b12","Type":"ContainerDied","Data":"ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07"} Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.719971 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:51Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.721071 4899 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.723439 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.723481 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.723493 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.723589 4899 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.731471 4899 kubelet_node_status.go:115] "Node was previously registered" node="crc" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.731712 4899 kubelet_node_status.go:79] "Successfully registered node" node="crc" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.732713 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.732747 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.732757 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.732771 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.732781 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:51Z","lastTransitionTime":"2025-10-03T08:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.735140 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:51Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:51 crc kubenswrapper[4899]: E1003 08:40:51.745832 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"b
b3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:51Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.749787 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:51Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.751877 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.751921 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.751931 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.751947 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.751957 4899 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:51Z","lastTransitionTime":"2025-10-03T08:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.761162 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:51Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:51 crc kubenswrapper[4899]: E1003 08:40:51.764070 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:51Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.766963 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.766990 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.766999 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.767011 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.767020 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:51Z","lastTransitionTime":"2025-10-03T08:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.772356 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:51Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:51 crc kubenswrapper[4899]: E1003 08:40:51.777753 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:51Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.781040 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.781075 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.781083 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.781097 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.781110 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:51Z","lastTransitionTime":"2025-10-03T08:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.785574 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:51Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:51 crc kubenswrapper[4899]: E1003 08:40:51.794322 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:51Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.799093 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-c
ontroller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:51Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.800350 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.800381 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.800391 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.800406 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.800416 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:51Z","lastTransitionTime":"2025-10-03T08:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.812012 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:51Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:51 crc kubenswrapper[4899]: E1003 08:40:51.812820 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"b
b3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:51Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:51 crc kubenswrapper[4899]: E1003 08:40:51.812937 4899 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.814723 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.814748 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.814756 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.814769 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.814778 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:51Z","lastTransitionTime":"2025-10-03T08:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.832947 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:51Z 
is after 2025-08-24T17:21:41Z" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.846050 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:51Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.856052 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:51Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.868315 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:51Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.880251 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:51Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.894236 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:51Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.918209 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.918253 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.918263 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.918282 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:51 crc kubenswrapper[4899]: I1003 08:40:51.918292 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:51Z","lastTransitionTime":"2025-10-03T08:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.020485 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.020528 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.020541 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.020576 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.020588 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:52Z","lastTransitionTime":"2025-10-03T08:40:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.122677 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.122706 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.122714 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.122727 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.122736 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:52Z","lastTransitionTime":"2025-10-03T08:40:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.226034 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.226078 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.226089 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.226107 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.226117 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:52Z","lastTransitionTime":"2025-10-03T08:40:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.328397 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.328452 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.328462 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.328479 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.328491 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:52Z","lastTransitionTime":"2025-10-03T08:40:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.431612 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.431657 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.431666 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.431682 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.431691 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:52Z","lastTransitionTime":"2025-10-03T08:40:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.534476 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.534541 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.534558 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.534584 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.534602 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:52Z","lastTransitionTime":"2025-10-03T08:40:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.637761 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.638018 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.638059 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.638087 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.638109 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:52Z","lastTransitionTime":"2025-10-03T08:40:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.715230 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" event={"ID":"764a7341-6f52-4fc1-9086-87b90aa126e8","Type":"ContainerStarted","Data":"f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e"} Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.718533 4899 generic.go:334] "Generic (PLEG): container finished" podID="0d4e5d0f-c610-483a-a7e0-92c39dce1b12" containerID="7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb" exitCode=0 Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.718567 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" event={"ID":"0d4e5d0f-c610-483a-a7e0-92c39dce1b12","Type":"ContainerDied","Data":"7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb"} Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.737423 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the 
pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:52Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.741648 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.741708 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.741723 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.741755 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.741773 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:52Z","lastTransitionTime":"2025-10-03T08:40:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.752961 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:52Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.769653 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:52Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.782221 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:52Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.795098 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:52Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.809602 4899 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:52Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.824253 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:52Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.837768 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-03T08:40:52Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.844021 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.844090 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.844108 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.844136 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.844149 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:52Z","lastTransitionTime":"2025-10-03T08:40:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.866506 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:52Z 
is after 2025-08-24T17:21:41Z" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.878708 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:52Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.891951 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:52Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.904147 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:52Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.921816 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:52Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.942152 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/
host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"sta
rted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:52Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.947866 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:52 
crc kubenswrapper[4899]: I1003 08:40:52.947940 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.947951 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.947967 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:52 crc kubenswrapper[4899]: I1003 08:40:52.947979 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:52Z","lastTransitionTime":"2025-10-03T08:40:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.050269 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.050338 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.050349 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.050380 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.050396 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:53Z","lastTransitionTime":"2025-10-03T08:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.115420 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.115475 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.115507 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:40:53 crc kubenswrapper[4899]: E1003 08:40:53.115629 4899 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 08:40:53 crc kubenswrapper[4899]: E1003 08:40:53.115686 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 08:41:01.115669506 +0000 UTC m=+35.223154459 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 08:40:53 crc kubenswrapper[4899]: E1003 08:40:53.115762 4899 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 08:40:53 crc kubenswrapper[4899]: E1003 08:40:53.116004 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 08:41:01.115958166 +0000 UTC m=+35.223443249 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 08:40:53 crc kubenswrapper[4899]: E1003 08:40:53.116089 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 08:40:53 crc kubenswrapper[4899]: E1003 08:40:53.116144 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 08:40:53 crc kubenswrapper[4899]: E1003 08:40:53.116171 4899 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:40:53 crc kubenswrapper[4899]: E1003 08:40:53.116239 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 08:41:01.116220395 +0000 UTC m=+35.223705388 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.154109 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.154182 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.154202 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.154233 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.154253 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:53Z","lastTransitionTime":"2025-10-03T08:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.216554 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.216691 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:40:53 crc kubenswrapper[4899]: E1003 08:40:53.216843 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 08:40:53 crc kubenswrapper[4899]: E1003 08:40:53.216865 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 08:40:53 crc kubenswrapper[4899]: E1003 08:40:53.216878 4899 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:40:53 crc kubenswrapper[4899]: E1003 08:40:53.216949 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 08:41:01.216934471 +0000 UTC m=+35.324419434 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:40:53 crc kubenswrapper[4899]: E1003 08:40:53.217030 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:41:01.217020083 +0000 UTC m=+35.324505036 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.256880 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.256965 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.256979 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.256998 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.257011 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:53Z","lastTransitionTime":"2025-10-03T08:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.360154 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.360213 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.360228 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.360249 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.360260 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:53Z","lastTransitionTime":"2025-10-03T08:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.462131 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.462184 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.462194 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.462209 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.462219 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:53Z","lastTransitionTime":"2025-10-03T08:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.526263 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.526513 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.526547 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:40:53 crc kubenswrapper[4899]: E1003 08:40:53.526607 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:40:53 crc kubenswrapper[4899]: E1003 08:40:53.526683 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:40:53 crc kubenswrapper[4899]: E1003 08:40:53.526774 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.564967 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.565048 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.565059 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.565082 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.565099 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:53Z","lastTransitionTime":"2025-10-03T08:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.667750 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.667802 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.667813 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.667873 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.667886 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:53Z","lastTransitionTime":"2025-10-03T08:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.729525 4899 generic.go:334] "Generic (PLEG): container finished" podID="0d4e5d0f-c610-483a-a7e0-92c39dce1b12" containerID="5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f" exitCode=0 Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.729591 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" event={"ID":"0d4e5d0f-c610-483a-a7e0-92c39dce1b12","Type":"ContainerDied","Data":"5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f"} Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.746873 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:53Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.763702 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:53Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.770045 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.770078 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.770093 4899 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.770112 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.770127 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:53Z","lastTransitionTime":"2025-10-03T08:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.778776 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:53Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.793765 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:53Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.809035 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:53Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.822602 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when 
the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:53Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.838100 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:53Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.853925 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:53Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.867703 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:53Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.873423 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.873461 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.873470 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.873489 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.873499 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:53Z","lastTransitionTime":"2025-10-03T08:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.880289 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:53Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.893501 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator
@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:53Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.913160 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:53Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.927499 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-03T08:40:53Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.946625 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",
\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\
\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\
\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:53Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.976192 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.976223 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.976235 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.976248 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:53 crc kubenswrapper[4899]: I1003 08:40:53.976259 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:53Z","lastTransitionTime":"2025-10-03T08:40:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.078921 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.078960 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.078986 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.079001 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.079010 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:54Z","lastTransitionTime":"2025-10-03T08:40:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.181691 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.181731 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.181740 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.181756 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.181765 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:54Z","lastTransitionTime":"2025-10-03T08:40:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.284363 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.284425 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.284438 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.284461 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.284477 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:54Z","lastTransitionTime":"2025-10-03T08:40:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.386960 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.387000 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.387011 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.387027 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.387036 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:54Z","lastTransitionTime":"2025-10-03T08:40:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.489058 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.489118 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.489128 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.489148 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.489161 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:54Z","lastTransitionTime":"2025-10-03T08:40:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.592451 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.592494 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.592503 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.592521 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.592532 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:54Z","lastTransitionTime":"2025-10-03T08:40:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.695468 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.695528 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.695543 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.695566 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.695583 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:54Z","lastTransitionTime":"2025-10-03T08:40:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.736364 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" event={"ID":"764a7341-6f52-4fc1-9086-87b90aa126e8","Type":"ContainerStarted","Data":"a086b6d2d1f390609aac777c2055644b4e3d63736f02ac21c5ff6016c4df750a"} Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.736721 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.741213 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" event={"ID":"0d4e5d0f-c610-483a-a7e0-92c39dce1b12","Type":"ContainerStarted","Data":"12e7158ae8abfde3e873066a70183f684e88d20db37ccd5a4e5251be59f1d27e"} Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.752721 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:54Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.767336 4899 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:54Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.781082 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:54Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.798297 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.798417 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.798458 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.798471 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.798486 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.798497 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:54Z","lastTransitionTime":"2025-10-03T08:40:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.803797 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:54Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.816291 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:54Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.832821 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:54Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.847795 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:54Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.860015 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-03T08:40:54Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.877233 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"re
adOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef
0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a086b6d2d1f390609aac777c2055644b4e3d63736f02ac21c5ff6016c4df750a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},
{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:54Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.888151 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:54Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.898357 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:54Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.900776 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.900822 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.900834 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.900854 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.900865 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:54Z","lastTransitionTime":"2025-10-03T08:40:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.909771 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:54Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.921104 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:54Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.936171 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\
"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089
f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:54Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.948191 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:54Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.957725 4899 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:54Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:54 crc kubenswrapper[4899]: I1003 08:40:54.974434 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers 
with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:54Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.003673 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\
\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:55Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.003753 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.003859 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.003867 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.003880 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.003898 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:55Z","lastTransitionTime":"2025-10-03T08:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.022384 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12e7158ae8abfde3e873066a70183f684e88d20db37ccd5a4e5251be59f1d27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:55Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.030780 4899 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:55Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.040503 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:55Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.052447 4899 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:55Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.063366 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:55Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.076128 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:55Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.093314 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"m
ountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e682d467e30d2463994106ee41fc6e4dcfc237
c649ca1954b5af7c555717837e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a086b6d2d1f390609aac777c2055644b4e3d63736f02ac21c5ff6016c4df750a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\"
:\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:55Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.105856 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:55Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.106065 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.106096 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.106109 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.106129 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.106140 4899 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:55Z","lastTransitionTime":"2025-10-03T08:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.118449 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:55Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.128479 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:55Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.208167 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.208202 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.208211 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.208226 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.208238 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:55Z","lastTransitionTime":"2025-10-03T08:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.310815 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.310861 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.310873 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.310909 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.310921 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:55Z","lastTransitionTime":"2025-10-03T08:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.413446 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.413489 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.413497 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.413511 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.413520 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:55Z","lastTransitionTime":"2025-10-03T08:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.516458 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.516508 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.516518 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.516557 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.516570 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:55Z","lastTransitionTime":"2025-10-03T08:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.526698 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.526724 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.526708 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:40:55 crc kubenswrapper[4899]: E1003 08:40:55.526813 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:40:55 crc kubenswrapper[4899]: E1003 08:40:55.526935 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:40:55 crc kubenswrapper[4899]: E1003 08:40:55.527003 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.619710 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.619761 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.619775 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.619792 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.619803 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:55Z","lastTransitionTime":"2025-10-03T08:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.722130 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.722165 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.722174 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.722188 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.722196 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:55Z","lastTransitionTime":"2025-10-03T08:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.743176 4899 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.743607 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.762262 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.775685 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:55Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.786315 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:55Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.798037 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:55Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.810031 4899 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:55Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.822743 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:55Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.824475 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.824520 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.824532 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.824546 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.824554 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:55Z","lastTransitionTime":"2025-10-03T08:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.834515 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:55Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.851851 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a086b6d2d1f390609aac777c2055644b4e3d63736f02ac21c5ff6016c4df750a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPat
h\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:55Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.867405 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:55Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.881210 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:55Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.897389 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:55Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.908145 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:55Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.921492 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12e7158ae8abfde3e873066a70183f684e88d20db37ccd5a4e5251be59f1d27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kub
ernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\
\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-10-03T08:40:55Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.926241 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.926282 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.926292 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.926310 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.926320 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:55Z","lastTransitionTime":"2025-10-03T08:40:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.933695 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:55Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:55 crc kubenswrapper[4899]: I1003 08:40:55.947222 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:55Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.028649 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.028694 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.028703 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.028717 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.028725 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:56Z","lastTransitionTime":"2025-10-03T08:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.131365 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.131411 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.131423 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.131440 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.131449 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:56Z","lastTransitionTime":"2025-10-03T08:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.234229 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.234282 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.234295 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.234309 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.234318 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:56Z","lastTransitionTime":"2025-10-03T08:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.337008 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.337056 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.337067 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.337084 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.337097 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:56Z","lastTransitionTime":"2025-10-03T08:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.441828 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.441875 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.441887 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.441927 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.441943 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:56Z","lastTransitionTime":"2025-10-03T08:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.542810 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.543729 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.543758 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.543766 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.543780 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.543789 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:56Z","lastTransitionTime":"2025-10-03T08:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.557942 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.576971 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12e7158ae8abfde3e873066a70183f684e88d20db37ccd5a4e5251be59f1d27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.587788 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.598119 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.615224 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.624827 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.634480 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.645940 4899 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.646184 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.646289 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.646383 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.646475 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:56Z","lastTransitionTime":"2025-10-03T08:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.647920 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.661068 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.673118 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-03T08:40:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.692432 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"
recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257
453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a086b6d2d1f390609aac777c2055644b4e3d63736f02ac21c5ff6016c4df750a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\
\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.706802 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.720195 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.745218 4899 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.748792 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.748843 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.748856 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.748871 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.748881 4899 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:56Z","lastTransitionTime":"2025-10-03T08:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.850779 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.850825 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.850834 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.850849 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.850858 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:56Z","lastTransitionTime":"2025-10-03T08:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.953359 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.953392 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.953400 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.953414 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:56 crc kubenswrapper[4899]: I1003 08:40:56.953422 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:56Z","lastTransitionTime":"2025-10-03T08:40:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.055567 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.055608 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.055616 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.055631 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.055643 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:57Z","lastTransitionTime":"2025-10-03T08:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.157544 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.157574 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.157590 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.157606 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.157616 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:57Z","lastTransitionTime":"2025-10-03T08:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.259780 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.259834 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.259847 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.259862 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.259871 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:57Z","lastTransitionTime":"2025-10-03T08:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.361954 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.361990 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.362000 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.362014 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.362023 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:57Z","lastTransitionTime":"2025-10-03T08:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.464295 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.464359 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.464372 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.464389 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.464401 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:57Z","lastTransitionTime":"2025-10-03T08:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.526165 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.526212 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.526254 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:40:57 crc kubenswrapper[4899]: E1003 08:40:57.526307 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:40:57 crc kubenswrapper[4899]: E1003 08:40:57.526388 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:40:57 crc kubenswrapper[4899]: E1003 08:40:57.526509 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.567217 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.567252 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.567261 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.567275 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.567284 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:57Z","lastTransitionTime":"2025-10-03T08:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.670823 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.670853 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.670861 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.670873 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.670883 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:57Z","lastTransitionTime":"2025-10-03T08:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.749159 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f7c_764a7341-6f52-4fc1-9086-87b90aa126e8/ovnkube-controller/0.log" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.751215 4899 generic.go:334] "Generic (PLEG): container finished" podID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerID="a086b6d2d1f390609aac777c2055644b4e3d63736f02ac21c5ff6016c4df750a" exitCode=1 Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.751256 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" event={"ID":"764a7341-6f52-4fc1-9086-87b90aa126e8","Type":"ContainerDied","Data":"a086b6d2d1f390609aac777c2055644b4e3d63736f02ac21c5ff6016c4df750a"} Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.752065 4899 scope.go:117] "RemoveContainer" containerID="a086b6d2d1f390609aac777c2055644b4e3d63736f02ac21c5ff6016c4df750a" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.765702 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:57Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.774035 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.774085 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.774123 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.774143 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.774154 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:57Z","lastTransitionTime":"2025-10-03T08:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.779056 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:57Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.792101 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:57Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.805122 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:57Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.815033 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:57Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.827713 4899 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:57Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.838535 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:57Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.849990 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-03T08:40:57Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.867698 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"
recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257
453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a086b6d2d1f390609aac777c2055644b4e3d63736f02ac21c5ff6016c4df750a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a086b6d2d1f390609aac777c2055644b4e3d63736f02ac21c5ff6016c4df750a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"message\\\":\\\"ice/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 08:40:57.049325 6209 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1003 08:40:57.049369 6209 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1003 08:40:57.049416 6209 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1003 08:40:57.049441 6209 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1003 08:40:57.049477 6209 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1003 08:40:57.049511 6209 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1003 08:40:57.049531 6209 handler.go:208] Removed *v1.Node event handler 2\\\\nI1003 08:40:57.049539 6209 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1003 08:40:57.049549 6209 handler.go:208] Removed *v1.Node event handler 7\\\\nI1003 08:40:57.049558 6209 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1003 08:40:57.049559 6209 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1003 08:40:57.049617 6209 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1003 08:40:57.049646 6209 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1003 08:40:57.049687 6209 factory.go:656] Stopping watch factory\\\\nI1003 08:40:57.049718 6209 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:57Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.876025 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.876077 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.876086 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.876118 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.876129 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:57Z","lastTransitionTime":"2025-10-03T08:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.878909 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:57Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.889289 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:57Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.900519 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:57Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.916606 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:57Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.930353 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12e7158ae8abfde3e873066a70183f684e88d20db37ccd5a4e5251be59f1d27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:57Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.931977 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.950435 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"image
ID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":t
rue,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a086b6d2d1f390609aac777c2055644b4e3d63736f02ac21c5ff6016c4df750a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a086b6d2d1f390609aac777c2055644b4e3d63736f02ac21c5ff6016c4df750a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"message\\\":\\\"ice/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 08:40:57.049325 6209 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1003 08:40:57.049369 6209 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1003 08:40:57.049416 6209 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1003 08:40:57.049441 6209 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1003 08:40:57.049477 6209 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1003 08:40:57.049511 6209 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1003 08:40:57.049531 6209 handler.go:208] Removed *v1.Node event handler 2\\\\nI1003 08:40:57.049539 6209 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1003 08:40:57.049549 6209 handler.go:208] Removed *v1.Node event handler 7\\\\nI1003 08:40:57.049558 6209 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1003 08:40:57.049559 6209 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1003 08:40:57.049617 6209 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1003 08:40:57.049646 6209 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1003 08:40:57.049687 6209 factory.go:656] Stopping watch factory\\\\nI1003 08:40:57.049718 6209 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:57Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.962041 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-clust
er-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:57Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.974440 4899 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd
13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:57Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.977752 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.977802 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.977814 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.977863 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.977875 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:57Z","lastTransitionTime":"2025-10-03T08:40:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.984207 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:57Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:57 crc kubenswrapper[4899]: I1003 08:40:57.995010 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:57Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.003028 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.012546 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.022568 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.035261 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12e7158ae8abfde3e873066a70183f684e88d20db37ccd5a4e5251be59f1d27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.045640 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.057581 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.070075 4899 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.079473 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.079541 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.079555 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.079571 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.079601 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:58Z","lastTransitionTime":"2025-10-03T08:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.080960 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.095728 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.182808 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.182855 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.182863 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.182879 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.182903 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:58Z","lastTransitionTime":"2025-10-03T08:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.285348 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.285390 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.285398 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.285415 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.285442 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:58Z","lastTransitionTime":"2025-10-03T08:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.387887 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.387939 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.387949 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.387963 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.387974 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:58Z","lastTransitionTime":"2025-10-03T08:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.490185 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.490248 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.490261 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.490283 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.490298 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:58Z","lastTransitionTime":"2025-10-03T08:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.592380 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.592419 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.592428 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.592444 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.592454 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:58Z","lastTransitionTime":"2025-10-03T08:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.593327 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x"] Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.594384 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.597141 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.597393 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.611622 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.621548 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.636152 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59642124-309c-4d11-9965-c47f9b123e27\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hrq4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.650401 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.663023 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.679606 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12e7158ae8abfde3e873066a70183f684e88d20db37ccd5a4e5251be59f1d27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.691879 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.694950 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.695009 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.695030 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.695059 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.695078 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:58Z","lastTransitionTime":"2025-10-03T08:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.707419 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.721047 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.732599 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.749681 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.758423 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f7c_764a7341-6f52-4fc1-9086-87b90aa126e8/ovnkube-controller/0.log" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.762048 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" event={"ID":"764a7341-6f52-4fc1-9086-87b90aa126e8","Type":"ContainerStarted","Data":"d6106afab19aeebc1c86a8ad33545f5c5478f610867ac32326ddb9318c09534c"} Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.762190 4899 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.774340 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bf29\" (UniqueName: \"kubernetes.io/projected/59642124-309c-4d11-9965-c47f9b123e27-kube-api-access-2bf29\") pod \"ovnkube-control-plane-749d76644c-hrq4x\" (UID: \"59642124-309c-4d11-9965-c47f9b123e27\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.774398 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/59642124-309c-4d11-9965-c47f9b123e27-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-hrq4x\" (UID: \"59642124-309c-4d11-9965-c47f9b123e27\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.774632 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/59642124-309c-4d11-9965-c47f9b123e27-env-overrides\") pod \"ovnkube-control-plane-749d76644c-hrq4x\" (UID: \"59642124-309c-4d11-9965-c47f9b123e27\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.774800 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/59642124-309c-4d11-9965-c47f9b123e27-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-hrq4x\" (UID: \"59642124-309c-4d11-9965-c47f9b123e27\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.776935 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a086b6d2d1f390609aac777c2055644b4e3d63736f02ac21c5ff6016c4df750a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a086b6d2d1f390609aac777c2055644b4e3d63736f02ac21c5ff6016c4df750a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"message\\\":\\\"ice/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 08:40:57.049325 6209 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1003 08:40:57.049369 6209 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1003 08:40:57.049416 6209 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1003 08:40:57.049441 6209 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1003 08:40:57.049477 6209 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1003 08:40:57.049511 6209 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1003 08:40:57.049531 6209 handler.go:208] Removed *v1.Node event handler 2\\\\nI1003 08:40:57.049539 6209 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1003 08:40:57.049549 6209 handler.go:208] Removed *v1.Node event handler 7\\\\nI1003 08:40:57.049558 6209 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1003 08:40:57.049559 6209 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1003 08:40:57.049617 6209 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1003 08:40:57.049646 6209 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1003 08:40:57.049687 6209 factory.go:656] Stopping watch factory\\\\nI1003 08:40:57.049718 6209 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.793057 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-clust
er-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.796998 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 
03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.797031 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.797043 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.797059 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.797069 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:58Z","lastTransitionTime":"2025-10-03T08:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.805203 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPa
th\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.814773 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.826087 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.837071 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.849366 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.867199 4899 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.876008 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/59642124-309c-4d11-9965-c47f9b123e27-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-hrq4x\" (UID: \"59642124-309c-4d11-9965-c47f9b123e27\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.876105 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/59642124-309c-4d11-9965-c47f9b123e27-env-overrides\") pod \"ovnkube-control-plane-749d76644c-hrq4x\" (UID: \"59642124-309c-4d11-9965-c47f9b123e27\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.876153 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/59642124-309c-4d11-9965-c47f9b123e27-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-hrq4x\" (UID: \"59642124-309c-4d11-9965-c47f9b123e27\") " 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.876186 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bf29\" (UniqueName: \"kubernetes.io/projected/59642124-309c-4d11-9965-c47f9b123e27-kube-api-access-2bf29\") pod \"ovnkube-control-plane-749d76644c-hrq4x\" (UID: \"59642124-309c-4d11-9965-c47f9b123e27\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.877231 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/59642124-309c-4d11-9965-c47f9b123e27-env-overrides\") pod \"ovnkube-control-plane-749d76644c-hrq4x\" (UID: \"59642124-309c-4d11-9965-c47f9b123e27\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.877355 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/59642124-309c-4d11-9965-c47f9b123e27-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-hrq4x\" (UID: \"59642124-309c-4d11-9965-c47f9b123e27\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.878926 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.883104 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/59642124-309c-4d11-9965-c47f9b123e27-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-hrq4x\" (UID: \"59642124-309c-4d11-9965-c47f9b123e27\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.890195 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.891091 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bf29\" (UniqueName: \"kubernetes.io/projected/59642124-309c-4d11-9965-c47f9b123e27-kube-api-access-2bf29\") pod \"ovnkube-control-plane-749d76644c-hrq4x\" (UID: \"59642124-309c-4d11-9965-c47f9b123e27\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.899390 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.899418 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.899427 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.899443 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.899452 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:58Z","lastTransitionTime":"2025-10-03T08:40:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.907685 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.921613 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453
265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6106afab19aeebc1c86a8ad33545f5c5478f610867ac32326ddb9318c09534c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a086b6d2d1f390609aac777c2055644b4e3d63736f02ac21c5ff6016c4df750a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"message\\\":\\\"ice/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 08:40:57.049325 6209 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1003 08:40:57.049369 6209 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1003 08:40:57.049416 6209 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1003 08:40:57.049441 6209 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1003 08:40:57.049477 6209 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1003 08:40:57.049511 6209 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1003 08:40:57.049531 6209 handler.go:208] Removed *v1.Node event handler 2\\\\nI1003 08:40:57.049539 6209 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1003 08:40:57.049549 6209 handler.go:208] Removed *v1.Node event handler 7\\\\nI1003 08:40:57.049558 6209 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1003 08:40:57.049559 6209 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1003 08:40:57.049617 6209 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1003 08:40:57.049646 6209 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1003 08:40:57.049687 6209 factory.go:656] Stopping watch factory\\\\nI1003 08:40:57.049718 6209 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: W1003 08:40:58.926223 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod59642124_309c_4d11_9965_c47f9b123e27.slice/crio-a20c65ae670dfebee0bb84b2ae49b000882d96c277abbc674eca88bd6401ca72 WatchSource:0}: Error finding container a20c65ae670dfebee0bb84b2ae49b000882d96c277abbc674eca88bd6401ca72: Status 404 returned error can't find the container with id a20c65ae670dfebee0bb84b2ae49b000882d96c277abbc674eca88bd6401ca72 Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.938458 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.956144 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.975550 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.986214 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:58 crc kubenswrapper[4899]: I1003 08:40:58.999202 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59642124-309c-4d11-9965-c47f9b123e27\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hrq4x\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:58Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.002467 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.002498 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.002510 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.002524 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.002533 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:59Z","lastTransitionTime":"2025-10-03T08:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.013428 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12e7158ae8abfde3e873066a70183f684e88d20db37ccd5a4e5251be59f1d27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.
126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\
"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":
{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:59Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.026231 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:59Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.043644 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:59Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.104847 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.105537 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.105558 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.105575 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.105588 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:59Z","lastTransitionTime":"2025-10-03T08:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.209339 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.209456 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.209481 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.209518 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.209578 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:59Z","lastTransitionTime":"2025-10-03T08:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.311848 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.311923 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.311934 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.311954 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.311964 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:59Z","lastTransitionTime":"2025-10-03T08:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.414374 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.414415 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.414425 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.414444 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.414457 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:59Z","lastTransitionTime":"2025-10-03T08:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.517761 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.517821 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.517837 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.517858 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.517869 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:59Z","lastTransitionTime":"2025-10-03T08:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.526107 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.526171 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.526108 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:40:59 crc kubenswrapper[4899]: E1003 08:40:59.526307 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:40:59 crc kubenswrapper[4899]: E1003 08:40:59.526396 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:40:59 crc kubenswrapper[4899]: E1003 08:40:59.526516 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.620562 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.620611 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.620619 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.620638 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.620646 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:59Z","lastTransitionTime":"2025-10-03T08:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.722677 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.722722 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.722734 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.722758 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.722768 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:59Z","lastTransitionTime":"2025-10-03T08:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.765608 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" event={"ID":"59642124-309c-4d11-9965-c47f9b123e27","Type":"ContainerStarted","Data":"fd7564933d473a51ceb1289e19cf7ea17049e9cbc134796f287686797157bac2"} Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.765666 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" event={"ID":"59642124-309c-4d11-9965-c47f9b123e27","Type":"ContainerStarted","Data":"eab4f364e3ec7f70e2276e192d8d4c2c6cc8e2822f4d7ac12f6198f935798bf1"} Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.765680 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" event={"ID":"59642124-309c-4d11-9965-c47f9b123e27","Type":"ContainerStarted","Data":"a20c65ae670dfebee0bb84b2ae49b000882d96c277abbc674eca88bd6401ca72"} Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.767763 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f7c_764a7341-6f52-4fc1-9086-87b90aa126e8/ovnkube-controller/1.log" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.768298 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f7c_764a7341-6f52-4fc1-9086-87b90aa126e8/ovnkube-controller/0.log" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.770527 4899 generic.go:334] "Generic (PLEG): container finished" podID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerID="d6106afab19aeebc1c86a8ad33545f5c5478f610867ac32326ddb9318c09534c" exitCode=1 Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.770603 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" event={"ID":"764a7341-6f52-4fc1-9086-87b90aa126e8","Type":"ContainerDied","Data":"d6106afab19aeebc1c86a8ad33545f5c5478f610867ac32326ddb9318c09534c"} Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.770662 4899 scope.go:117] "RemoveContainer" containerID="a086b6d2d1f390609aac777c2055644b4e3d63736f02ac21c5ff6016c4df750a" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.771208 4899 scope.go:117] "RemoveContainer" containerID="d6106afab19aeebc1c86a8ad33545f5c5478f610867ac32326ddb9318c09534c" Oct 03 08:40:59 crc kubenswrapper[4899]: E1003 08:40:59.771367 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-g7f7c_openshift-ovn-kubernetes(764a7341-6f52-4fc1-9086-87b90aa126e8)\"" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.777914 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:59Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.790283 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:59Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.801206 4899 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:59Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.812933 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:59Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.825679 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.825718 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.825727 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.825743 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.825755 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:59Z","lastTransitionTime":"2025-10-03T08:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.828324 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:59Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.846335 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6106afab19aeebc1c86a8ad33545f5c5478f610867ac32326ddb9318c09534c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a086b6d2d1f390609aac777c2055644b4e3d63736f02ac21c5ff6016c4df750a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"message\\\":\\\"ice/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 08:40:57.049325 6209 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1003 08:40:57.049369 6209 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1003 08:40:57.049416 6209 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1003 08:40:57.049441 6209 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1003 08:40:57.049477 6209 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1003 08:40:57.049511 6209 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1003 08:40:57.049531 6209 handler.go:208] Removed *v1.Node event handler 2\\\\nI1003 08:40:57.049539 6209 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1003 08:40:57.049549 6209 handler.go:208] Removed *v1.Node event handler 7\\\\nI1003 08:40:57.049558 6209 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1003 08:40:57.049559 6209 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1003 08:40:57.049617 6209 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1003 08:40:57.049646 6209 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1003 08:40:57.049687 6209 factory.go:656] Stopping watch factory\\\\nI1003 08:40:57.049718 6209 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:59Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.863234 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:59Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.875803 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:59Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.894581 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-03T08:40:59Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.906500 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:59Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.915277 4899 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:59Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.926293 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"59642124-309c-4d11-9965-c47f9b123e27\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab4f364e3ec7f70e2276e192d8d4c2c6cc8e2822f4d7ac12f6198f935798bf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd7564933d473a51ceb1289e19cf7ea17049e9cbc134796f287686797157bac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hrq4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:59Z is after 2025-08-24T17:21:41Z" Oct 03 
08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.928423 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.928463 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.928472 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.928488 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.928498 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:40:59Z","lastTransitionTime":"2025-10-03T08:40:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.938456 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:59Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.950332 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:59Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.965368 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12e7158ae8abfde3e873066a70183f684e88d20db37ccd5a4e5251be59f1d27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\
",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:59Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.975898 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:59Z is after 2025-08-24T17:21:41Z" Oct 03 08:40:59 crc kubenswrapper[4899]: I1003 08:40:59.987325 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:59Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.000243 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12e7158ae8abfde3e873066a70183f684e88d20db37ccd5a4e5251be59f1d27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\
",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:40:59Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.010531 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168
.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.023086 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"
Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.031672 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.031739 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.031758 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.031784 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.031804 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:00Z","lastTransitionTime":"2025-10-03T08:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.040005 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.054210 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.069310 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.084474 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-ldv5d"] Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.085231 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:00 crc kubenswrapper[4899]: E1003 08:41:00.085329 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.089401 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6106afab19aeebc1c86a8ad33545f5c5478f610
867ac32326ddb9318c09534c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a086b6d2d1f390609aac777c2055644b4e3d63736f02ac21c5ff6016c4df750a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"message\\\":\\\"ice/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 08:40:57.049325 6209 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1003 08:40:57.049369 6209 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1003 08:40:57.049416 6209 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1003 08:40:57.049441 6209 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1003 08:40:57.049477 6209 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1003 08:40:57.049511 6209 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1003 08:40:57.049531 6209 handler.go:208] Removed *v1.Node event handler 2\\\\nI1003 08:40:57.049539 6209 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1003 08:40:57.049549 6209 handler.go:208] Removed *v1.Node event handler 7\\\\nI1003 08:40:57.049558 6209 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1003 08:40:57.049559 6209 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1003 08:40:57.049617 6209 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1003 08:40:57.049646 6209 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1003 08:40:57.049687 6209 factory.go:656] Stopping watch factory\\\\nI1003 08:40:57.049718 6209 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6106afab19aeebc1c86a8ad33545f5c5478f610867ac32326ddb9318c09534c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"message\\\":\\\"hift-ovn-kubernetes/ovnkube-node-g7f7c openshift-image-registry/node-ca-jpgn4 openshift-machine-config-operator/machine-config-daemon-t2h4g]\\\\nI1003 08:40:58.763384 6353 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1003 08:40:58.763387 6353 lb_config.go:1031] Cluster endpoints for openshift-operator-lifecycle-manager/olm-operator-metrics for network=default are: map[]\\\\nI1003 08:40:58.763399 6353 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-t2h4g\\\\nI1003 08:40:58.763410 6353 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-t2h4g\\\\nI1003 08:40:58.763418 6353 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-t2h4g in node crc\\\\nI1003 08:40:58.763410 6353 services_controller.go:443] Built service openshift-operator-lifecycle-manager/olm-operator-metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.168\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:8443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), 
V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1003 08:40:58.763423 6353 obj\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.105805 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.118705 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.131376 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.133944 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.133987 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.133996 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.134011 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.134022 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:00Z","lastTransitionTime":"2025-10-03T08:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.145945 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name
\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.160700 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: 
Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.175407 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59642124-309c-4d11-9965-c47f9b123e27\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab4f364e3ec7f70e2276e192d8d4c2c6cc8e2822f4d7ac12f6198f935798bf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd7564933d473a51ceb1289e19cf7ea17049e9cbc134796f287686797157bac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T0
8:40:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hrq4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.190230 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5pv9\" (UniqueName: \"kubernetes.io/projected/27fd79a9-c016-46aa-8b67-446a831eb2d8-kube-api-access-c5pv9\") pod \"network-metrics-daemon-ldv5d\" (UID: \"27fd79a9-c016-46aa-8b67-446a831eb2d8\") " pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.190298 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/27fd79a9-c016-46aa-8b67-446a831eb2d8-metrics-certs\") pod \"network-metrics-daemon-ldv5d\" (UID: \"27fd79a9-c016-46aa-8b67-446a831eb2d8\") " pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.192984 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.211819 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.225348 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.236826 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.236867 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.236878 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.236905 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.236917 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:00Z","lastTransitionTime":"2025-10-03T08:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.241840 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.255974 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ldv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27fd79a9-c016-46aa-8b67-446a831eb2d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:41:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ldv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.270853 4899 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.284535 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.291155 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5pv9\" (UniqueName: \"kubernetes.io/projected/27fd79a9-c016-46aa-8b67-446a831eb2d8-kube-api-access-c5pv9\") pod \"network-metrics-daemon-ldv5d\" (UID: \"27fd79a9-c016-46aa-8b67-446a831eb2d8\") " pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.291233 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/27fd79a9-c016-46aa-8b67-446a831eb2d8-metrics-certs\") pod \"network-metrics-daemon-ldv5d\" (UID: \"27fd79a9-c016-46aa-8b67-446a831eb2d8\") " pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:00 crc kubenswrapper[4899]: E1003 08:41:00.291393 4899 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 08:41:00 crc kubenswrapper[4899]: E1003 08:41:00.291463 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/27fd79a9-c016-46aa-8b67-446a831eb2d8-metrics-certs podName:27fd79a9-c016-46aa-8b67-446a831eb2d8 nodeName:}" failed. No retries permitted until 2025-10-03 08:41:00.791446417 +0000 UTC m=+34.898931370 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/27fd79a9-c016-46aa-8b67-446a831eb2d8-metrics-certs") pod "network-metrics-daemon-ldv5d" (UID: "27fd79a9-c016-46aa-8b67-446a831eb2d8") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.298866 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.309126 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5pv9\" (UniqueName: \"kubernetes.io/projected/27fd79a9-c016-46aa-8b67-446a831eb2d8-kube-api-access-c5pv9\") pod \"network-metrics-daemon-ldv5d\" (UID: \"27fd79a9-c016-46aa-8b67-446a831eb2d8\") " pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.323296 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6106afab19aeebc1c86a8ad33545f5c5478f610867ac32326ddb9318c09534c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a086b6d2d1f390609aac777c2055644b4e3d63736f02ac21c5ff6016c4df750a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"message\\\":\\\"ice/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 08:40:57.049325 6209 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1003 08:40:57.049369 6209 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1003 08:40:57.049416 6209 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1003 08:40:57.049441 6209 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1003 08:40:57.049477 6209 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1003 08:40:57.049511 6209 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1003 08:40:57.049531 6209 handler.go:208] Removed *v1.Node event handler 2\\\\nI1003 08:40:57.049539 6209 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1003 08:40:57.049549 6209 handler.go:208] Removed *v1.Node event handler 7\\\\nI1003 08:40:57.049558 6209 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1003 08:40:57.049559 6209 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1003 08:40:57.049617 6209 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1003 08:40:57.049646 6209 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1003 08:40:57.049687 6209 factory.go:656] Stopping watch factory\\\\nI1003 08:40:57.049718 6209 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6106afab19aeebc1c86a8ad33545f5c5478f610867ac32326ddb9318c09534c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"message\\\":\\\"hift-ovn-kubernetes/ovnkube-node-g7f7c openshift-image-registry/node-ca-jpgn4 openshift-machine-config-operator/machine-config-daemon-t2h4g]\\\\nI1003 08:40:58.763384 6353 
obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1003 08:40:58.763387 6353 lb_config.go:1031] Cluster endpoints for openshift-operator-lifecycle-manager/olm-operator-metrics for network=default are: map[]\\\\nI1003 08:40:58.763399 6353 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-t2h4g\\\\nI1003 08:40:58.763410 6353 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-t2h4g\\\\nI1003 08:40:58.763418 6353 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-t2h4g in node crc\\\\nI1003 08:40:58.763410 6353 services_controller.go:443] Built service openshift-operator-lifecycle-manager/olm-operator-metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.168\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:8443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1003 08:40:58.763423 6353 obj\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41a
c2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.338986 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.339053 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.339064 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.339100 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.339111 4899 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:00Z","lastTransitionTime":"2025-10-03T08:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.340575 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\
":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.352673 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"59642124-309c-4d11-9965-c47f9b123e27\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab4f364e3ec7f70e2276e192d8d4c2c6cc8e2822f4d7ac12f6198f935798bf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd7564933d473a51ceb1289e19cf7ea17049e9cbc134796f287686797157bac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hrq4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 
08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.365496 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.374960 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.386468 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.399772 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12e7158ae8abfde3e873066a70183f684e88d20db37ccd5a4e5251be59f1d27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.412123 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.442193 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.442237 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.442249 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.442266 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.442277 4899 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:00Z","lastTransitionTime":"2025-10-03T08:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.545402 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.545471 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.545486 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.545511 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.545526 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:00Z","lastTransitionTime":"2025-10-03T08:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.648083 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.648147 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.648159 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.648184 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.648200 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:00Z","lastTransitionTime":"2025-10-03T08:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.752864 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.752939 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.752952 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.752970 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.752984 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:00Z","lastTransitionTime":"2025-10-03T08:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.778422 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f7c_764a7341-6f52-4fc1-9086-87b90aa126e8/ovnkube-controller/1.log" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.781445 4899 scope.go:117] "RemoveContainer" containerID="d6106afab19aeebc1c86a8ad33545f5c5478f610867ac32326ddb9318c09534c" Oct 03 08:41:00 crc kubenswrapper[4899]: E1003 08:41:00.781597 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-g7f7c_openshift-ovn-kubernetes(764a7341-6f52-4fc1-9086-87b90aa126e8)\"" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.793534 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.796733 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/27fd79a9-c016-46aa-8b67-446a831eb2d8-metrics-certs\") pod \"network-metrics-daemon-ldv5d\" (UID: \"27fd79a9-c016-46aa-8b67-446a831eb2d8\") " pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:00 crc kubenswrapper[4899]: E1003 08:41:00.796919 4899 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 08:41:00 crc kubenswrapper[4899]: E1003 08:41:00.796973 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/27fd79a9-c016-46aa-8b67-446a831eb2d8-metrics-certs podName:27fd79a9-c016-46aa-8b67-446a831eb2d8 nodeName:}" failed. No retries permitted until 2025-10-03 08:41:01.796958565 +0000 UTC m=+35.904443518 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/27fd79a9-c016-46aa-8b67-446a831eb2d8-metrics-certs") pod "network-metrics-daemon-ldv5d" (UID: "27fd79a9-c016-46aa-8b67-446a831eb2d8") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.805200 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.815098 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.847887 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.854876 4899 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.854931 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.854943 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.854960 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.854971 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:00Z","lastTransitionTime":"2025-10-03T08:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.867384 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ldv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27fd79a9-c016-46aa-8b67-446a831eb2d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:41:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ldv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.880630 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.891007 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.903418 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.921873 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6106afab19aeebc1c86a8ad33545f5c5478f610
867ac32326ddb9318c09534c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6106afab19aeebc1c86a8ad33545f5c5478f610867ac32326ddb9318c09534c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"message\\\":\\\"hift-ovn-kubernetes/ovnkube-node-g7f7c openshift-image-registry/node-ca-jpgn4 openshift-machine-config-operator/machine-config-daemon-t2h4g]\\\\nI1003 08:40:58.763384 6353 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1003 08:40:58.763387 6353 lb_config.go:1031] Cluster endpoints for openshift-operator-lifecycle-manager/olm-operator-metrics for network=default are: map[]\\\\nI1003 08:40:58.763399 6353 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-t2h4g\\\\nI1003 08:40:58.763410 6353 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-t2h4g\\\\nI1003 08:40:58.763418 6353 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-t2h4g in node crc\\\\nI1003 08:40:58.763410 6353 services_controller.go:443] Built service openshift-operator-lifecycle-manager/olm-operator-metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.168\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:8443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1003 08:40:58.763423 6353 obj\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-g7f7c_openshift-ovn-kubernetes(764a7341-6f52-4fc1-9086-87b90aa126e8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.935131 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.945538 4899 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59642124-309c-4d11-9965-c47f9b123e27\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab4f364e3ec7f70e2276e192d8d4c2c6cc8e2822f4d7ac12f6198f935798bf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd7564933d473a51ceb1289e19cf7ea17049e9cbc134796f287686797157bac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hrq4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.957012 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.957581 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.957626 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.957637 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.957653 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.957665 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:00Z","lastTransitionTime":"2025-10-03T08:41:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.966421 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.978132 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursi
veReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:00 crc kubenswrapper[4899]: I1003 08:41:00.990445 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12e7158ae8abfde3e873066a70183f684e88d20db37ccd5a4e5251be59f1d27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"202
5-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/ope
nshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\
\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.001513 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:00Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.060216 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.060254 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.060263 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.060279 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.060291 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:01Z","lastTransitionTime":"2025-10-03T08:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.162401 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.162436 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.162443 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.162456 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.162465 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:01Z","lastTransitionTime":"2025-10-03T08:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.201049 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.201097 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.201120 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:01 crc kubenswrapper[4899]: E1003 08:41:01.201219 4899 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 08:41:01 crc kubenswrapper[4899]: E1003 08:41:01.201269 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 08:41:01 crc kubenswrapper[4899]: E1003 08:41:01.201289 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 08:41:01 crc kubenswrapper[4899]: E1003 08:41:01.201302 4899 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:41:01 crc kubenswrapper[4899]: E1003 08:41:01.201308 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 08:41:17.20128951 +0000 UTC m=+51.308774513 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 08:41:01 crc kubenswrapper[4899]: E1003 08:41:01.201353 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 08:41:17.201342411 +0000 UTC m=+51.308827414 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:41:01 crc kubenswrapper[4899]: E1003 08:41:01.201225 4899 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 08:41:01 crc kubenswrapper[4899]: E1003 08:41:01.201499 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 08:41:17.201483296 +0000 UTC m=+51.308968249 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.264882 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.264946 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.264958 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.264975 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.264988 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:01Z","lastTransitionTime":"2025-10-03T08:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.302318 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.302463 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:01 crc kubenswrapper[4899]: E1003 08:41:01.302509 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:41:17.302481992 +0000 UTC m=+51.409966945 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:41:01 crc kubenswrapper[4899]: E1003 08:41:01.302601 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 08:41:01 crc kubenswrapper[4899]: E1003 08:41:01.302624 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 08:41:01 crc kubenswrapper[4899]: E1003 08:41:01.302637 4899 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:41:01 crc kubenswrapper[4899]: E1003 08:41:01.302686 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 08:41:17.302674679 +0000 UTC m=+51.410159632 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.367106 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.367147 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.367159 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.367175 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.367186 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:01Z","lastTransitionTime":"2025-10-03T08:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.468731 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.468783 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.468792 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.468807 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.468818 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:01Z","lastTransitionTime":"2025-10-03T08:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.526563 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.526702 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.526804 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:01 crc kubenswrapper[4899]: E1003 08:41:01.526746 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.526726 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:01 crc kubenswrapper[4899]: E1003 08:41:01.527063 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:01 crc kubenswrapper[4899]: E1003 08:41:01.527125 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:01 crc kubenswrapper[4899]: E1003 08:41:01.527199 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.571032 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.571089 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.571102 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.571117 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.571129 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:01Z","lastTransitionTime":"2025-10-03T08:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.673794 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.673929 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.673949 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.673987 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.674007 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:01Z","lastTransitionTime":"2025-10-03T08:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.776613 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.776651 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.776661 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.776677 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.776688 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:01Z","lastTransitionTime":"2025-10-03T08:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.806570 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/27fd79a9-c016-46aa-8b67-446a831eb2d8-metrics-certs\") pod \"network-metrics-daemon-ldv5d\" (UID: \"27fd79a9-c016-46aa-8b67-446a831eb2d8\") " pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:01 crc kubenswrapper[4899]: E1003 08:41:01.806730 4899 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 08:41:01 crc kubenswrapper[4899]: E1003 08:41:01.807305 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/27fd79a9-c016-46aa-8b67-446a831eb2d8-metrics-certs podName:27fd79a9-c016-46aa-8b67-446a831eb2d8 nodeName:}" failed. No retries permitted until 2025-10-03 08:41:03.807275577 +0000 UTC m=+37.914760530 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/27fd79a9-c016-46aa-8b67-446a831eb2d8-metrics-certs") pod "network-metrics-daemon-ldv5d" (UID: "27fd79a9-c016-46aa-8b67-446a831eb2d8") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.879531 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.879575 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.879584 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.879606 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.879616 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:01Z","lastTransitionTime":"2025-10-03T08:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.981837 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.982255 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.982339 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.982443 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:01 crc kubenswrapper[4899]: I1003 08:41:01.982535 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:01Z","lastTransitionTime":"2025-10-03T08:41:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.084679 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.084721 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.084734 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.084751 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.084763 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:02Z","lastTransitionTime":"2025-10-03T08:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.187271 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.187317 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.187326 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.187341 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.187350 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:02Z","lastTransitionTime":"2025-10-03T08:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.211606 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.211681 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.211694 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.211732 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.211743 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:02Z","lastTransitionTime":"2025-10-03T08:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:02 crc kubenswrapper[4899]: E1003 08:41:02.222819 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:02Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.226179 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.226209 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.226222 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.226237 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.226247 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:02Z","lastTransitionTime":"2025-10-03T08:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:02 crc kubenswrapper[4899]: E1003 08:41:02.236088 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:02Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.238579 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.238613 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.238621 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.238634 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.238644 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:02Z","lastTransitionTime":"2025-10-03T08:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:02 crc kubenswrapper[4899]: E1003 08:41:02.249094 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:02Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.252243 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.252275 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.252283 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.252297 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.252308 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:02Z","lastTransitionTime":"2025-10-03T08:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:02 crc kubenswrapper[4899]: E1003 08:41:02.263242 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:02Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.266153 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.266278 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.266355 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.266439 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.266508 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:02Z","lastTransitionTime":"2025-10-03T08:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:02 crc kubenswrapper[4899]: E1003 08:41:02.276817 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:02Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:02 crc kubenswrapper[4899]: E1003 08:41:02.276937 4899 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.289022 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.289058 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.289068 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.289084 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.289092 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:02Z","lastTransitionTime":"2025-10-03T08:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.391222 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.391256 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.391264 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.391280 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.391290 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:02Z","lastTransitionTime":"2025-10-03T08:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.493084 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.493124 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.493136 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.493152 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.493162 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:02Z","lastTransitionTime":"2025-10-03T08:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.595971 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.596035 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.596046 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.596062 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.596073 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:02Z","lastTransitionTime":"2025-10-03T08:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.698411 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.698446 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.698454 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.698468 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.698480 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:02Z","lastTransitionTime":"2025-10-03T08:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.800275 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.800314 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.800324 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.800338 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.800350 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:02Z","lastTransitionTime":"2025-10-03T08:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.902457 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.902490 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.902498 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.902511 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:02 crc kubenswrapper[4899]: I1003 08:41:02.902520 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:02Z","lastTransitionTime":"2025-10-03T08:41:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.005199 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.005462 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.005570 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.005685 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.005773 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:03Z","lastTransitionTime":"2025-10-03T08:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.107394 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.107429 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.107439 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.107451 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.107461 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:03Z","lastTransitionTime":"2025-10-03T08:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.209378 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.209417 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.209426 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.209439 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.209448 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:03Z","lastTransitionTime":"2025-10-03T08:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.311584 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.311616 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.311625 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.311638 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.311660 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:03Z","lastTransitionTime":"2025-10-03T08:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.413748 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.413778 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.413787 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.413799 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.413807 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:03Z","lastTransitionTime":"2025-10-03T08:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.517093 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.517153 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.517164 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.517180 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.517190 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:03Z","lastTransitionTime":"2025-10-03T08:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.526736 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.526879 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:03 crc kubenswrapper[4899]: E1003 08:41:03.526990 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.527100 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:03 crc kubenswrapper[4899]: E1003 08:41:03.527227 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.527306 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:03 crc kubenswrapper[4899]: E1003 08:41:03.527373 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:03 crc kubenswrapper[4899]: E1003 08:41:03.527424 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.620351 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.620434 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.620458 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.620491 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.620517 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:03Z","lastTransitionTime":"2025-10-03T08:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.723176 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.723242 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.723258 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.723283 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.723303 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:03Z","lastTransitionTime":"2025-10-03T08:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.827390 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.827484 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.827508 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.827543 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.827566 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:03Z","lastTransitionTime":"2025-10-03T08:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.832610 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/27fd79a9-c016-46aa-8b67-446a831eb2d8-metrics-certs\") pod \"network-metrics-daemon-ldv5d\" (UID: \"27fd79a9-c016-46aa-8b67-446a831eb2d8\") " pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:03 crc kubenswrapper[4899]: E1003 08:41:03.832962 4899 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 08:41:03 crc kubenswrapper[4899]: E1003 08:41:03.833112 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/27fd79a9-c016-46aa-8b67-446a831eb2d8-metrics-certs podName:27fd79a9-c016-46aa-8b67-446a831eb2d8 nodeName:}" failed. No retries permitted until 2025-10-03 08:41:07.833070888 +0000 UTC m=+41.940555991 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/27fd79a9-c016-46aa-8b67-446a831eb2d8-metrics-certs") pod "network-metrics-daemon-ldv5d" (UID: "27fd79a9-c016-46aa-8b67-446a831eb2d8") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.931837 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.931966 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.931995 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.932028 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:03 crc kubenswrapper[4899]: I1003 08:41:03.932048 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:03Z","lastTransitionTime":"2025-10-03T08:41:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.035307 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.035368 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.035382 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.035403 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.035419 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:04Z","lastTransitionTime":"2025-10-03T08:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.139398 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.139485 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.139511 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.139545 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.139569 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:04Z","lastTransitionTime":"2025-10-03T08:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.242614 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.242665 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.242684 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.242701 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.242716 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:04Z","lastTransitionTime":"2025-10-03T08:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.345471 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.345503 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.345511 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.345551 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.345570 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:04Z","lastTransitionTime":"2025-10-03T08:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.449227 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.449312 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.449327 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.449347 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.449363 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:04Z","lastTransitionTime":"2025-10-03T08:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.552676 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.552713 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.552722 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.552740 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.552752 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:04Z","lastTransitionTime":"2025-10-03T08:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.654681 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.654724 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.654736 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.654751 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.654763 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:04Z","lastTransitionTime":"2025-10-03T08:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.756504 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.756538 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.756552 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.756569 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.756583 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:04Z","lastTransitionTime":"2025-10-03T08:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.858699 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.858740 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.858752 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.858769 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.858782 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:04Z","lastTransitionTime":"2025-10-03T08:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.961220 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.961276 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.961285 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.961298 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:04 crc kubenswrapper[4899]: I1003 08:41:04.961306 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:04Z","lastTransitionTime":"2025-10-03T08:41:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.063820 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.063880 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.063902 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.063916 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.063927 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:05Z","lastTransitionTime":"2025-10-03T08:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.166263 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.166336 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.166356 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.166380 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.166393 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:05Z","lastTransitionTime":"2025-10-03T08:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.268351 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.268406 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.268420 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.268437 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.268446 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:05Z","lastTransitionTime":"2025-10-03T08:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.370625 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.370657 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.370665 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.370679 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.370689 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:05Z","lastTransitionTime":"2025-10-03T08:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.472931 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.472973 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.472983 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.473000 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.473010 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:05Z","lastTransitionTime":"2025-10-03T08:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.526475 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.526507 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.526618 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:05 crc kubenswrapper[4899]: E1003 08:41:05.526617 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:05 crc kubenswrapper[4899]: E1003 08:41:05.526720 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.526760 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:05 crc kubenswrapper[4899]: E1003 08:41:05.526808 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:05 crc kubenswrapper[4899]: E1003 08:41:05.526848 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.575582 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.575627 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.575638 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.575655 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.575668 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:05Z","lastTransitionTime":"2025-10-03T08:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.677319 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.677355 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.677365 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.677380 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.677390 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:05Z","lastTransitionTime":"2025-10-03T08:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.779548 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.779598 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.779606 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.779621 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.779630 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:05Z","lastTransitionTime":"2025-10-03T08:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.882379 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.882440 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.882449 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.882464 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.882475 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:05Z","lastTransitionTime":"2025-10-03T08:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.984715 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.984760 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.984772 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.984790 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:05 crc kubenswrapper[4899]: I1003 08:41:05.984801 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:05Z","lastTransitionTime":"2025-10-03T08:41:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.086560 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.086604 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.086613 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.086626 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.086636 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:06Z","lastTransitionTime":"2025-10-03T08:41:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.188467 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.188512 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.188554 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.188571 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.188582 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:06Z","lastTransitionTime":"2025-10-03T08:41:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.291078 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.291137 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.291146 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.291162 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.291173 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:06Z","lastTransitionTime":"2025-10-03T08:41:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.393284 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.393326 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.393337 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.393354 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.393366 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:06Z","lastTransitionTime":"2025-10-03T08:41:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.495613 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.495925 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.495936 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.495949 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.495958 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:06Z","lastTransitionTime":"2025-10-03T08:41:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.537310 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59642124-309c-4d11-9965-c47f9b123e27\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab4f364e3ec7f70e2276e192d8d4c2c6cc8e2822f4d7ac12f6198f935798bf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd7564933d473a51ceb1289e19cf7ea17049e9cbc134796f287686797157bac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hrq4x\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.548180 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:06Z is after 
2025-08-24T17:21:41Z" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.557218 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.567001 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.579532 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12e7158ae8abfde3e873066a70183f684e88d20db37ccd5a4e5251be59f1d27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-03T08:41:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.591373 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.598079 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.598100 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.598110 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.598122 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.598132 4899 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:06Z","lastTransitionTime":"2025-10-03T08:41:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.601480 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.614381 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.623974 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.633248 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.642512 4899 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-ldv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27fd79a9-c016-46aa-8b67-446a831eb2d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:41:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ldv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.652942 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.664113 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.673880 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-03T08:41:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.689675 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"
recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257
453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6106afab19aeebc1c86a8ad33545f5c5478f610867ac32326ddb9318c09534c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6106afab19aeebc1c86a8ad33545f5c5478f610867ac32326ddb9318c09534c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"message\\\":\\\"hift-ovn-kubernetes/ovnkube-node-g7f7c openshift-image-registry/node-ca-jpgn4 openshift-machine-config-operator/machine-config-daemon-t2h4g]\\\\nI1003 08:40:58.763384 6353 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1003 08:40:58.763387 6353 lb_config.go:1031] Cluster endpoints for openshift-operator-lifecycle-manager/olm-operator-metrics for network=default are: map[]\\\\nI1003 08:40:58.763399 6353 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-t2h4g\\\\nI1003 08:40:58.763410 6353 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-t2h4g\\\\nI1003 08:40:58.763418 6353 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-t2h4g in node crc\\\\nI1003 08:40:58.763410 6353 services_controller.go:443] Built service openshift-operator-lifecycle-manager/olm-operator-metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.168\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:8443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1003 08:40:58.763423 6353 obj\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=ovnkube-controller pod=ovnkube-node-g7f7c_openshift-ovn-kubernetes(764a7341-6f52-4fc1-9086-87b90aa126e8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.700150 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.700204 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.700213 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.700227 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.700236 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:06Z","lastTransitionTime":"2025-10-03T08:41:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.702824 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.802249 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.802287 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.802295 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.802308 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.802316 4899 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:06Z","lastTransitionTime":"2025-10-03T08:41:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.904555 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.904591 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.904603 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.904621 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:06 crc kubenswrapper[4899]: I1003 08:41:06.904632 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:06Z","lastTransitionTime":"2025-10-03T08:41:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.007499 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.007549 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.007559 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.007579 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.007591 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:07Z","lastTransitionTime":"2025-10-03T08:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.114600 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.114647 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.114658 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.114675 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.114687 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:07Z","lastTransitionTime":"2025-10-03T08:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.217834 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.217881 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.217909 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.217924 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.217934 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:07Z","lastTransitionTime":"2025-10-03T08:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.321242 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.321300 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.321312 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.321326 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.321338 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:07Z","lastTransitionTime":"2025-10-03T08:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.413681 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.414452 4899 scope.go:117] "RemoveContainer" containerID="d6106afab19aeebc1c86a8ad33545f5c5478f610867ac32326ddb9318c09534c" Oct 03 08:41:07 crc kubenswrapper[4899]: E1003 08:41:07.414618 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-g7f7c_openshift-ovn-kubernetes(764a7341-6f52-4fc1-9086-87b90aa126e8)\"" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.424172 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.424226 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.424237 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.424256 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.424275 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:07Z","lastTransitionTime":"2025-10-03T08:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.526085 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.526150 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.526093 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.526248 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:07 crc kubenswrapper[4899]: E1003 08:41:07.526330 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:07 crc kubenswrapper[4899]: E1003 08:41:07.526403 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:07 crc kubenswrapper[4899]: E1003 08:41:07.526504 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:07 crc kubenswrapper[4899]: E1003 08:41:07.526731 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.528295 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.528333 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.528345 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.528365 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.528378 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:07Z","lastTransitionTime":"2025-10-03T08:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.631600 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.631654 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.631666 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.631688 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.631701 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:07Z","lastTransitionTime":"2025-10-03T08:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.735165 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.735226 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.735242 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.735264 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.735278 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:07Z","lastTransitionTime":"2025-10-03T08:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.837884 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.837979 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.837990 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.838010 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.838022 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:07Z","lastTransitionTime":"2025-10-03T08:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.873013 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/27fd79a9-c016-46aa-8b67-446a831eb2d8-metrics-certs\") pod \"network-metrics-daemon-ldv5d\" (UID: \"27fd79a9-c016-46aa-8b67-446a831eb2d8\") " pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:07 crc kubenswrapper[4899]: E1003 08:41:07.873209 4899 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 08:41:07 crc kubenswrapper[4899]: E1003 08:41:07.873292 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/27fd79a9-c016-46aa-8b67-446a831eb2d8-metrics-certs podName:27fd79a9-c016-46aa-8b67-446a831eb2d8 nodeName:}" failed. No retries permitted until 2025-10-03 08:41:15.873269917 +0000 UTC m=+49.980754870 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/27fd79a9-c016-46aa-8b67-446a831eb2d8-metrics-certs") pod "network-metrics-daemon-ldv5d" (UID: "27fd79a9-c016-46aa-8b67-446a831eb2d8") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.941199 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.941251 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.941266 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.941286 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:07 crc kubenswrapper[4899]: I1003 08:41:07.941299 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:07Z","lastTransitionTime":"2025-10-03T08:41:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.044606 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.044696 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.044723 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.044757 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.044781 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:08Z","lastTransitionTime":"2025-10-03T08:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.146958 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.146986 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.146994 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.147007 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.147017 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:08Z","lastTransitionTime":"2025-10-03T08:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.250479 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.250536 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.250547 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.250565 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.250578 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:08Z","lastTransitionTime":"2025-10-03T08:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.354274 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.354323 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.354331 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.354345 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.354354 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:08Z","lastTransitionTime":"2025-10-03T08:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.457710 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.457757 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.457774 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.457794 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.457807 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:08Z","lastTransitionTime":"2025-10-03T08:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.561263 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.561306 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.561326 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.561345 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.561358 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:08Z","lastTransitionTime":"2025-10-03T08:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.664715 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.664777 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.664788 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.664805 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.664816 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:08Z","lastTransitionTime":"2025-10-03T08:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.768076 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.768146 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.768159 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.768181 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.768197 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:08Z","lastTransitionTime":"2025-10-03T08:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.871941 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.871988 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.872005 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.872021 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.872031 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:08Z","lastTransitionTime":"2025-10-03T08:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.974568 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.974609 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.974618 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.974632 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:08 crc kubenswrapper[4899]: I1003 08:41:08.974643 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:08Z","lastTransitionTime":"2025-10-03T08:41:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.077168 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.077240 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.077256 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.077284 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.077300 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:09Z","lastTransitionTime":"2025-10-03T08:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.180454 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.180502 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.180513 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.180530 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.180541 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:09Z","lastTransitionTime":"2025-10-03T08:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.282957 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.283026 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.283045 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.283072 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.283091 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:09Z","lastTransitionTime":"2025-10-03T08:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.385786 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.385827 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.385839 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.385854 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.385865 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:09Z","lastTransitionTime":"2025-10-03T08:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.492618 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.492653 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.492661 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.492675 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.492684 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:09Z","lastTransitionTime":"2025-10-03T08:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.525990 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.526113 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.526266 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:09 crc kubenswrapper[4899]: E1003 08:41:09.526429 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.526537 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:09 crc kubenswrapper[4899]: E1003 08:41:09.526676 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:09 crc kubenswrapper[4899]: E1003 08:41:09.526846 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:09 crc kubenswrapper[4899]: E1003 08:41:09.527024 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.596403 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.596454 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.596466 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.596482 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.596493 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:09Z","lastTransitionTime":"2025-10-03T08:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.698946 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.698988 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.698999 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.699015 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.699025 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:09Z","lastTransitionTime":"2025-10-03T08:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.801947 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.802002 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.802015 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.802041 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.802057 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:09Z","lastTransitionTime":"2025-10-03T08:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.905578 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.905637 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.905646 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.905668 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:09 crc kubenswrapper[4899]: I1003 08:41:09.905680 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:09Z","lastTransitionTime":"2025-10-03T08:41:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.009188 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.009271 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.009284 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.009307 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.009398 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:10Z","lastTransitionTime":"2025-10-03T08:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.112938 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.112985 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.112994 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.113010 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.113022 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:10Z","lastTransitionTime":"2025-10-03T08:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.215915 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.215952 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.215961 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.215975 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.215983 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:10Z","lastTransitionTime":"2025-10-03T08:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.319147 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.319216 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.319228 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.319251 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.319267 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:10Z","lastTransitionTime":"2025-10-03T08:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.421927 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.422005 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.422022 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.422049 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.422063 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:10Z","lastTransitionTime":"2025-10-03T08:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.524842 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.524911 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.524920 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.524936 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.524946 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:10Z","lastTransitionTime":"2025-10-03T08:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.628465 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.628526 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.628538 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.628559 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.628575 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:10Z","lastTransitionTime":"2025-10-03T08:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.731858 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.731952 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.731969 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.731991 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.732007 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:10Z","lastTransitionTime":"2025-10-03T08:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.835309 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.835403 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.835430 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.835470 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.835498 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:10Z","lastTransitionTime":"2025-10-03T08:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.939079 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.939150 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.939168 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.939192 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:10 crc kubenswrapper[4899]: I1003 08:41:10.939213 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:10Z","lastTransitionTime":"2025-10-03T08:41:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.041264 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.041300 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.041309 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.041322 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.041331 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:11Z","lastTransitionTime":"2025-10-03T08:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.144137 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.144195 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.144206 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.144223 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.144236 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:11Z","lastTransitionTime":"2025-10-03T08:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.246695 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.246757 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.246768 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.246785 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.246806 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:11Z","lastTransitionTime":"2025-10-03T08:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.348441 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.348479 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.348487 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.348499 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.348510 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:11Z","lastTransitionTime":"2025-10-03T08:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.450664 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.450728 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.450737 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.450752 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.450763 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:11Z","lastTransitionTime":"2025-10-03T08:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.526161 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.526210 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.526210 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:11 crc kubenswrapper[4899]: E1003 08:41:11.526290 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.526181 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:11 crc kubenswrapper[4899]: E1003 08:41:11.526367 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:11 crc kubenswrapper[4899]: E1003 08:41:11.526531 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:11 crc kubenswrapper[4899]: E1003 08:41:11.526585 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.553538 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.553578 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.553588 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.553609 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.553627 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:11Z","lastTransitionTime":"2025-10-03T08:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.656428 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.656481 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.656490 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.656509 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.656529 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:11Z","lastTransitionTime":"2025-10-03T08:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.759717 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.759786 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.759796 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.759810 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.759821 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:11Z","lastTransitionTime":"2025-10-03T08:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.862794 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.862857 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.862874 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.862915 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.862931 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:11Z","lastTransitionTime":"2025-10-03T08:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.965761 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.965816 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.965829 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.965851 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:11 crc kubenswrapper[4899]: I1003 08:41:11.965866 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:11Z","lastTransitionTime":"2025-10-03T08:41:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.068965 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.069000 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.069011 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.069026 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.069037 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:12Z","lastTransitionTime":"2025-10-03T08:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.171362 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.171401 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.171409 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.171424 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.171435 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:12Z","lastTransitionTime":"2025-10-03T08:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.273457 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.273535 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.273557 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.273585 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.273603 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:12Z","lastTransitionTime":"2025-10-03T08:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.377312 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.377363 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.377396 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.377415 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.377425 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:12Z","lastTransitionTime":"2025-10-03T08:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.479637 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.479691 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.479701 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.479715 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.479725 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:12Z","lastTransitionTime":"2025-10-03T08:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.524985 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.525066 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.525078 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.525093 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.525102 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:12Z","lastTransitionTime":"2025-10-03T08:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:12 crc kubenswrapper[4899]: E1003 08:41:12.537827 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:12Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.543474 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.543510 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.543518 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.543529 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.543539 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:12Z","lastTransitionTime":"2025-10-03T08:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:12 crc kubenswrapper[4899]: E1003 08:41:12.556830 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:12Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.561560 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.561595 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.561607 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.561621 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.561631 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:12Z","lastTransitionTime":"2025-10-03T08:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:12 crc kubenswrapper[4899]: E1003 08:41:12.575570 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:12Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.579083 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.579109 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.579117 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.579131 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.579140 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:12Z","lastTransitionTime":"2025-10-03T08:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:12 crc kubenswrapper[4899]: E1003 08:41:12.593557 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:12Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.596974 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.597002 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.597011 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.597024 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.597034 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:12Z","lastTransitionTime":"2025-10-03T08:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:12 crc kubenswrapper[4899]: E1003 08:41:12.608145 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:12Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:12 crc kubenswrapper[4899]: E1003 08:41:12.608265 4899 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.609805 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.609849 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.609857 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.609871 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.609880 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:12Z","lastTransitionTime":"2025-10-03T08:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.712052 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.712095 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.712104 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.712119 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.712132 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:12Z","lastTransitionTime":"2025-10-03T08:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.814859 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.814930 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.814941 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.814956 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.814966 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:12Z","lastTransitionTime":"2025-10-03T08:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.917217 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.917259 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.917270 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.917286 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:12 crc kubenswrapper[4899]: I1003 08:41:12.917297 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:12Z","lastTransitionTime":"2025-10-03T08:41:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.019758 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.019808 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.019821 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.019838 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.019850 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:13Z","lastTransitionTime":"2025-10-03T08:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.121982 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.122022 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.122035 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.122052 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.122060 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:13Z","lastTransitionTime":"2025-10-03T08:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.224097 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.224137 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.224148 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.224216 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.224244 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:13Z","lastTransitionTime":"2025-10-03T08:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.327092 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.327146 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.327157 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.327176 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.327189 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:13Z","lastTransitionTime":"2025-10-03T08:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.429409 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.429438 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.429447 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.429462 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.429471 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:13Z","lastTransitionTime":"2025-10-03T08:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.526733 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.526782 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.526736 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:13 crc kubenswrapper[4899]: E1003 08:41:13.526857 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.526753 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:13 crc kubenswrapper[4899]: E1003 08:41:13.526990 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:13 crc kubenswrapper[4899]: E1003 08:41:13.527124 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:13 crc kubenswrapper[4899]: E1003 08:41:13.527166 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.531106 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.531137 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.531148 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.531164 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.531183 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:13Z","lastTransitionTime":"2025-10-03T08:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.632655 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.632690 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.632697 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.632710 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.632719 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:13Z","lastTransitionTime":"2025-10-03T08:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.734833 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.734869 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.734878 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.734912 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.734924 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:13Z","lastTransitionTime":"2025-10-03T08:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.836808 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.836854 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.836864 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.836881 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.836907 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:13Z","lastTransitionTime":"2025-10-03T08:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.939165 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.939210 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.939224 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.939241 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:13 crc kubenswrapper[4899]: I1003 08:41:13.939255 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:13Z","lastTransitionTime":"2025-10-03T08:41:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.041673 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.041728 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.041741 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.041757 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.041769 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:14Z","lastTransitionTime":"2025-10-03T08:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.144664 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.144951 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.145052 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.145140 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.145245 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:14Z","lastTransitionTime":"2025-10-03T08:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.247014 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.247050 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.247060 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.247076 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.247088 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:14Z","lastTransitionTime":"2025-10-03T08:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.348883 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.348937 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.348947 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.348962 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.348972 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:14Z","lastTransitionTime":"2025-10-03T08:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.451482 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.451518 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.451528 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.451546 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.451558 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:14Z","lastTransitionTime":"2025-10-03T08:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.554041 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.554078 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.554087 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.554101 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.554111 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:14Z","lastTransitionTime":"2025-10-03T08:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.655878 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.655932 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.655944 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.655961 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.655972 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:14Z","lastTransitionTime":"2025-10-03T08:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.757498 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.757546 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.757557 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.757574 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.757586 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:14Z","lastTransitionTime":"2025-10-03T08:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.859225 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.859259 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.859269 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.859283 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.859293 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:14Z","lastTransitionTime":"2025-10-03T08:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.961421 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.961461 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.961472 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.961487 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:14 crc kubenswrapper[4899]: I1003 08:41:14.961498 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:14Z","lastTransitionTime":"2025-10-03T08:41:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.063153 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.063212 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.063223 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.063240 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.063254 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:15Z","lastTransitionTime":"2025-10-03T08:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.165779 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.165819 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.165830 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.165844 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.165854 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:15Z","lastTransitionTime":"2025-10-03T08:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.268279 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.268330 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.268343 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.268358 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.268372 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:15Z","lastTransitionTime":"2025-10-03T08:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.370940 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.370981 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.371001 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.371016 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.371028 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:15Z","lastTransitionTime":"2025-10-03T08:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.473124 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.473178 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.473187 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.473201 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.473210 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:15Z","lastTransitionTime":"2025-10-03T08:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.526907 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.526944 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.527012 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:15 crc kubenswrapper[4899]: E1003 08:41:15.527029 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.526916 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:15 crc kubenswrapper[4899]: E1003 08:41:15.527141 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:15 crc kubenswrapper[4899]: E1003 08:41:15.527195 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:15 crc kubenswrapper[4899]: E1003 08:41:15.527316 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.574946 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.574984 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.574991 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.575003 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.575013 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:15Z","lastTransitionTime":"2025-10-03T08:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.676997 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.677106 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.677141 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.677178 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.677200 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:15Z","lastTransitionTime":"2025-10-03T08:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.780839 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.780980 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.781008 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.781044 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.781066 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:15Z","lastTransitionTime":"2025-10-03T08:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.884171 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.884254 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.884266 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.884284 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.884294 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:15Z","lastTransitionTime":"2025-10-03T08:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.962466 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/27fd79a9-c016-46aa-8b67-446a831eb2d8-metrics-certs\") pod \"network-metrics-daemon-ldv5d\" (UID: \"27fd79a9-c016-46aa-8b67-446a831eb2d8\") " pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:15 crc kubenswrapper[4899]: E1003 08:41:15.962642 4899 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 08:41:15 crc kubenswrapper[4899]: E1003 08:41:15.962705 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/27fd79a9-c016-46aa-8b67-446a831eb2d8-metrics-certs podName:27fd79a9-c016-46aa-8b67-446a831eb2d8 nodeName:}" failed. No retries permitted until 2025-10-03 08:41:31.962687864 +0000 UTC m=+66.070172817 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/27fd79a9-c016-46aa-8b67-446a831eb2d8-metrics-certs") pod "network-metrics-daemon-ldv5d" (UID: "27fd79a9-c016-46aa-8b67-446a831eb2d8") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.986314 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.986353 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.986364 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.986380 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:15 crc kubenswrapper[4899]: I1003 08:41:15.986390 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:15Z","lastTransitionTime":"2025-10-03T08:41:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.088774 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.088805 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.088815 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.088829 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.088841 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:16Z","lastTransitionTime":"2025-10-03T08:41:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.191491 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.191528 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.191538 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.191554 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.191565 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:16Z","lastTransitionTime":"2025-10-03T08:41:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.293863 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.293918 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.293926 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.293944 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.293955 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:16Z","lastTransitionTime":"2025-10-03T08:41:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.396331 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.396357 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.396365 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.396376 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.396385 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:16Z","lastTransitionTime":"2025-10-03T08:41:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.498350 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.498395 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.498403 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.498416 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.498425 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:16Z","lastTransitionTime":"2025-10-03T08:41:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.538434 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:16Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.548144 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:16Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.558461 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59642124-309c-4d11-9965-c47f9b123e27\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab4f364e3ec7f70e2276e192d8d4c2c6cc8e2822f4d7ac12f6198f935798bf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd7564933d473a51ceb1289e19cf7ea17049e9cbc134796f287686797157bac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:58Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hrq4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:16Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.568452 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:16Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.579027 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:16Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.591005 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12e7158ae8abfde3e873066a70183f684e88d20db37ccd5a4e5251be59f1d27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\
",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:16Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.600376 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.600613 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.600628 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.600657 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.600668 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:16Z","lastTransitionTime":"2025-10-03T08:41:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.602059 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:16Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.614365 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:16Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.626690 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:16Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.634612 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:16Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.643445 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:16Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.651379 4899 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-ldv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27fd79a9-c016-46aa-8b67-446a831eb2d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:41:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ldv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:16Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.662785 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:16Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.674452 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:16Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.685212 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-03T08:41:16Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.701968 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"
recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257
453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6106afab19aeebc1c86a8ad33545f5c5478f610867ac32326ddb9318c09534c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6106afab19aeebc1c86a8ad33545f5c5478f610867ac32326ddb9318c09534c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"message\\\":\\\"hift-ovn-kubernetes/ovnkube-node-g7f7c openshift-image-registry/node-ca-jpgn4 openshift-machine-config-operator/machine-config-daemon-t2h4g]\\\\nI1003 08:40:58.763384 6353 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1003 08:40:58.763387 6353 lb_config.go:1031] Cluster endpoints for openshift-operator-lifecycle-manager/olm-operator-metrics for network=default are: map[]\\\\nI1003 08:40:58.763399 6353 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-t2h4g\\\\nI1003 08:40:58.763410 6353 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-t2h4g\\\\nI1003 08:40:58.763418 6353 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-t2h4g in node crc\\\\nI1003 08:40:58.763410 6353 services_controller.go:443] Built service openshift-operator-lifecycle-manager/olm-operator-metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.168\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:8443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1003 08:40:58.763423 6353 obj\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=ovnkube-controller pod=ovnkube-node-g7f7c_openshift-ovn-kubernetes(764a7341-6f52-4fc1-9086-87b90aa126e8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:16Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.703110 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.703144 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.703157 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.703174 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.703185 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:16Z","lastTransitionTime":"2025-10-03T08:41:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.805302 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.805338 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.805346 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.805359 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.805368 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:16Z","lastTransitionTime":"2025-10-03T08:41:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.907726 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.907774 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.907782 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.907797 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.907806 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:16Z","lastTransitionTime":"2025-10-03T08:41:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.982835 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.991379 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 03 08:41:16 crc kubenswrapper[4899]: I1003 08:41:16.994563 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:16Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.006702 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:17Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.010333 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.010368 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.010377 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.010392 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.010401 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:17Z","lastTransitionTime":"2025-10-03T08:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.022136 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12e7158ae8abfde3e873066a70183f684e88d20db37ccd5a4e5251be59f1d27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:17Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.034280 4899 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:17Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.045461 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:17Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.056950 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:17Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.066559 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:17Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.077190 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:17Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.087191 4899 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-ldv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27fd79a9-c016-46aa-8b67-446a831eb2d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:41:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ldv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:17Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.098506 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:17Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.109073 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:17Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.112474 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.112521 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.112532 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.112552 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.112562 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:17Z","lastTransitionTime":"2025-10-03T08:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.120432 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:17Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.139719 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6106afab19aeebc1c86a8ad33545f5c5478f610867ac32326ddb9318c09534c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6106afab19aeebc1c86a8ad33545f5c5478f610867ac32326ddb9318c09534c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"message\\\":\\\"hift-ovn-kubernetes/ovnkube-node-g7f7c openshift-image-registry/node-ca-jpgn4 openshift-machine-config-operator/machine-config-daemon-t2h4g]\\\\nI1003 08:40:58.763384 6353 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1003 08:40:58.763387 6353 lb_config.go:1031] Cluster endpoints for openshift-operator-lifecycle-manager/olm-operator-metrics for network=default are: map[]\\\\nI1003 08:40:58.763399 6353 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-t2h4g\\\\nI1003 08:40:58.763410 6353 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-t2h4g\\\\nI1003 08:40:58.763418 6353 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-t2h4g in node crc\\\\nI1003 08:40:58.763410 6353 services_controller.go:443] Built service openshift-operator-lifecycle-manager/olm-operator-metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.168\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:8443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1003 08:40:58.763423 6353 obj\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-g7f7c_openshift-ovn-kubernetes(764a7341-6f52-4fc1-9086-87b90aa126e8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:17Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.150975 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:17Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.159170 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03
T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:17Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.168747 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59642124-309c-4d11-9965-c47f9b123e27\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab4f364e3ec7f70e2276e192d8d4c2c6cc8e2822f4d7ac12f6198f935798bf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd7564933d473a51ceb1289e19cf7ea17049e9cbc134796f287686797157bac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[
{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hrq4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:17Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.214579 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.214624 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.214636 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.214653 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.214664 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:17Z","lastTransitionTime":"2025-10-03T08:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.276491 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.276548 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.276581 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:17 crc kubenswrapper[4899]: E1003 08:41:17.276663 4899 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 08:41:17 crc kubenswrapper[4899]: E1003 08:41:17.276703 4899 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object 
"openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 08:41:17 crc kubenswrapper[4899]: E1003 08:41:17.276707 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 08:41:17 crc kubenswrapper[4899]: E1003 08:41:17.276731 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 08:41:17 crc kubenswrapper[4899]: E1003 08:41:17.276742 4899 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:41:17 crc kubenswrapper[4899]: E1003 08:41:17.276748 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 08:41:49.2767273 +0000 UTC m=+83.384212313 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 08:41:17 crc kubenswrapper[4899]: E1003 08:41:17.276771 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 08:41:49.276759901 +0000 UTC m=+83.384244984 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 08:41:17 crc kubenswrapper[4899]: E1003 08:41:17.276783 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 08:41:49.276778651 +0000 UTC m=+83.384263604 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.316799 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.316834 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.316842 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.316857 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.316870 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:17Z","lastTransitionTime":"2025-10-03T08:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.377789 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.377963 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:17 crc kubenswrapper[4899]: E1003 08:41:17.378039 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:41:49.378012726 +0000 UTC m=+83.485497679 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:41:17 crc kubenswrapper[4899]: E1003 08:41:17.378150 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 08:41:17 crc kubenswrapper[4899]: E1003 08:41:17.378181 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 08:41:17 crc kubenswrapper[4899]: E1003 08:41:17.378192 4899 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:41:17 crc kubenswrapper[4899]: E1003 08:41:17.378249 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 08:41:49.378234192 +0000 UTC m=+83.485719145 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.418591 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.418651 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.418662 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.418677 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.418690 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:17Z","lastTransitionTime":"2025-10-03T08:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.524837 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.524880 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.524907 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.524926 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.524938 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:17Z","lastTransitionTime":"2025-10-03T08:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.526322 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.526360 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.526328 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.526414 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:17 crc kubenswrapper[4899]: E1003 08:41:17.526545 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:17 crc kubenswrapper[4899]: E1003 08:41:17.526631 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:17 crc kubenswrapper[4899]: E1003 08:41:17.526700 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:17 crc kubenswrapper[4899]: E1003 08:41:17.526777 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.627221 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.627271 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.627283 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.627300 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.627312 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:17Z","lastTransitionTime":"2025-10-03T08:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.729452 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.729714 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.729871 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.729974 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.730040 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:17Z","lastTransitionTime":"2025-10-03T08:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.831737 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.831778 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.831786 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.831801 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.831810 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:17Z","lastTransitionTime":"2025-10-03T08:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.933735 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.934021 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.934105 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.934182 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:17 crc kubenswrapper[4899]: I1003 08:41:17.934265 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:17Z","lastTransitionTime":"2025-10-03T08:41:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.036074 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.036133 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.036146 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.036161 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.036170 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:18Z","lastTransitionTime":"2025-10-03T08:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.138349 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.138413 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.138431 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.138447 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.138460 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:18Z","lastTransitionTime":"2025-10-03T08:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.240387 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.240441 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.240449 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.240463 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.240472 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:18Z","lastTransitionTime":"2025-10-03T08:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.343555 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.343600 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.343610 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.343626 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.343635 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:18Z","lastTransitionTime":"2025-10-03T08:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.446049 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.446090 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.446099 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.446115 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.446125 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:18Z","lastTransitionTime":"2025-10-03T08:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.548718 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.548757 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.548767 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.548782 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.548794 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:18Z","lastTransitionTime":"2025-10-03T08:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.651739 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.651780 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.651797 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.651815 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.651825 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:18Z","lastTransitionTime":"2025-10-03T08:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.753684 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.753729 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.753739 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.753752 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.753760 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:18Z","lastTransitionTime":"2025-10-03T08:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.855417 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.855447 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.855456 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.855469 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.855478 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:18Z","lastTransitionTime":"2025-10-03T08:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.958106 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.958142 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.958151 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.958185 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:18 crc kubenswrapper[4899]: I1003 08:41:18.958197 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:18Z","lastTransitionTime":"2025-10-03T08:41:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.060554 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.060585 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.060609 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.060624 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.060634 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:19Z","lastTransitionTime":"2025-10-03T08:41:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.163278 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.163307 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.163316 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.163328 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.163336 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:19Z","lastTransitionTime":"2025-10-03T08:41:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.266269 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.266308 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.266318 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.266331 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.266341 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:19Z","lastTransitionTime":"2025-10-03T08:41:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.369198 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.369249 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.369263 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.369280 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.369292 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:19Z","lastTransitionTime":"2025-10-03T08:41:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.471692 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.471738 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.471747 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.471763 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.471776 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:19Z","lastTransitionTime":"2025-10-03T08:41:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.526513 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.526522 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.526728 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.526862 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:19 crc kubenswrapper[4899]: E1003 08:41:19.526867 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:19 crc kubenswrapper[4899]: E1003 08:41:19.527045 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:19 crc kubenswrapper[4899]: E1003 08:41:19.527180 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:19 crc kubenswrapper[4899]: E1003 08:41:19.527276 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.576053 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.576095 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.576104 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.576119 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.576131 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:19Z","lastTransitionTime":"2025-10-03T08:41:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.678796 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.678925 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.678974 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.679004 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.679020 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:19Z","lastTransitionTime":"2025-10-03T08:41:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.781760 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.781828 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.781837 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.781858 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.781868 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:19Z","lastTransitionTime":"2025-10-03T08:41:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.884927 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.884992 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.885004 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.885022 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.885037 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:19Z","lastTransitionTime":"2025-10-03T08:41:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.988032 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.988096 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.988114 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.988141 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:19 crc kubenswrapper[4899]: I1003 08:41:19.988163 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:19Z","lastTransitionTime":"2025-10-03T08:41:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.090498 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.090539 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.090550 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.090581 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.090594 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:20Z","lastTransitionTime":"2025-10-03T08:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.193289 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.193317 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.193326 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.193340 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.193349 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:20Z","lastTransitionTime":"2025-10-03T08:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.296358 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.296402 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.296411 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.296445 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.296457 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:20Z","lastTransitionTime":"2025-10-03T08:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.398994 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.399566 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.399643 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.399733 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.399792 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:20Z","lastTransitionTime":"2025-10-03T08:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.502167 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.502210 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.502221 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.502237 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.502248 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:20Z","lastTransitionTime":"2025-10-03T08:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.528478 4899 scope.go:117] "RemoveContainer" containerID="d6106afab19aeebc1c86a8ad33545f5c5478f610867ac32326ddb9318c09534c" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.605054 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.605112 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.605122 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.605135 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.605163 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:20Z","lastTransitionTime":"2025-10-03T08:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.707208 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.707243 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.707255 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.707272 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.707283 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:20Z","lastTransitionTime":"2025-10-03T08:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.810007 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.810460 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.810475 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.810492 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.810502 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:20Z","lastTransitionTime":"2025-10-03T08:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.841708 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f7c_764a7341-6f52-4fc1-9086-87b90aa126e8/ovnkube-controller/1.log" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.844204 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" event={"ID":"764a7341-6f52-4fc1-9086-87b90aa126e8","Type":"ContainerStarted","Data":"a56dddb4754810c1205abc563d8c68f51b19c03469eb832f543879d689844ea4"} Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.844703 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.864138 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:20Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.878981 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-03T08:41:20Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.904736 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"
recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257
453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a56dddb4754810c1205abc563d8c68f51b19c03469eb832f543879d689844ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6106afab19aeebc1c86a8ad33545f5c5478f610867ac32326ddb9318c09534c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"message\\\":\\\"hift-ovn-kubernetes/ovnkube-node-g7f7c openshift-image-registry/node-ca-jpgn4 openshift-machine-config-operator/machine-config-daemon-t2h4g]\\\\nI1003 08:40:58.763384 6353 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1003 08:40:58.763387 6353 lb_config.go:1031] Cluster endpoints for openshift-operator-lifecycle-manager/olm-operator-metrics for network=default are: map[]\\\\nI1003 08:40:58.763399 6353 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-t2h4g\\\\nI1003 08:40:58.763410 6353 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-t2h4g\\\\nI1003 08:40:58.763418 6353 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-t2h4g in node crc\\\\nI1003 08:40:58.763410 6353 services_controller.go:443] Built service openshift-operator-lifecycle-manager/olm-operator-metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.168\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:8443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1003 08:40:58.763423 6353 
obj\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"co
ntainerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:20Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.912905 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.912946 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.912957 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.912974 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.912986 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:20Z","lastTransitionTime":"2025-10-03T08:41:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.920939 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:20Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.933152 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"59642124-309c-4d11-9965-c47f9b123e27\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab4f364e3ec7f70e2276e192d8d4c2c6cc8e2822f4d7ac12f6198f935798bf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd7564933d473a51ceb1289e19cf7ea17049e9cbc134796f287686797157bac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hrq4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:20Z is after 2025-08-24T17:21:41Z" Oct 03 
08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.945218 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:20Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.956989 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:20Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.968679 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:20Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.982004 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12e7158ae8abfde3e873066a70183f684e88d20db37ccd5a4e5251be59f1d27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-03T08:41:20Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:20 crc kubenswrapper[4899]: I1003 08:41:20.993804 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:20Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.004969 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.015911 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.015952 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.015961 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.015975 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.015986 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:21Z","lastTransitionTime":"2025-10-03T08:41:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.018399 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.028435 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.039415 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.050687 4899 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-ldv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27fd79a9-c016-46aa-8b67-446a831eb2d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:41:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ldv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.060846 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9385ff69-ac63-4dac-b8ba-67bcfd9e9d41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2890c36988f29fbea57dfd712a1d66304622780d233996abe521a7a8bf0a932d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://306605d9d3b9065f7c546feb2478296ab23df5ee17f88e9732b78584614b2c3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54093b84153ff7894489025339a6977db0f46b4d2a3b49cc60077af28010d315\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b658507fbaf0ea4f98774e10f9bf7c34ad6dd9fffef18057d09c287f217091d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b658507fbaf0ea4f98774e10f9bf7c34ad6dd9fffef18057d09c287f217091d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.074682 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.118067 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.118103 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.118111 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.118124 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.118134 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:21Z","lastTransitionTime":"2025-10-03T08:41:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.220416 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.220453 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.220462 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.220477 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.220488 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:21Z","lastTransitionTime":"2025-10-03T08:41:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.322943 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.322981 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.322989 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.323005 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.323014 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:21Z","lastTransitionTime":"2025-10-03T08:41:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.424464 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.424494 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.424502 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.424514 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.424523 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:21Z","lastTransitionTime":"2025-10-03T08:41:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.526181 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.526316 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:21 crc kubenswrapper[4899]: E1003 08:41:21.526360 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.526379 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.526416 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:21 crc kubenswrapper[4899]: E1003 08:41:21.526524 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.526569 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.526589 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.526598 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.526613 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.526636 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:21Z","lastTransitionTime":"2025-10-03T08:41:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:21 crc kubenswrapper[4899]: E1003 08:41:21.526605 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:21 crc kubenswrapper[4899]: E1003 08:41:21.526678 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.629141 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.629186 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.629198 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.629213 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.629222 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:21Z","lastTransitionTime":"2025-10-03T08:41:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.731099 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.731140 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.731151 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.731166 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.731177 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:21Z","lastTransitionTime":"2025-10-03T08:41:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.833418 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.833461 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.833472 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.833487 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.833500 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:21Z","lastTransitionTime":"2025-10-03T08:41:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.847915 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f7c_764a7341-6f52-4fc1-9086-87b90aa126e8/ovnkube-controller/2.log" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.848455 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f7c_764a7341-6f52-4fc1-9086-87b90aa126e8/ovnkube-controller/1.log" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.850462 4899 generic.go:334] "Generic (PLEG): container finished" podID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerID="a56dddb4754810c1205abc563d8c68f51b19c03469eb832f543879d689844ea4" exitCode=1 Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.850511 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" event={"ID":"764a7341-6f52-4fc1-9086-87b90aa126e8","Type":"ContainerDied","Data":"a56dddb4754810c1205abc563d8c68f51b19c03469eb832f543879d689844ea4"} Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.850567 4899 scope.go:117] "RemoveContainer" containerID="d6106afab19aeebc1c86a8ad33545f5c5478f610867ac32326ddb9318c09534c" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.851061 4899 scope.go:117] "RemoveContainer" containerID="a56dddb4754810c1205abc563d8c68f51b19c03469eb832f543879d689844ea4" Oct 03 08:41:21 crc kubenswrapper[4899]: E1003 08:41:21.851256 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-g7f7c_openshift-ovn-kubernetes(764a7341-6f52-4fc1-9086-87b90aa126e8)\"" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.865697 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.876916 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.887447 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"59642124-309c-4d11-9965-c47f9b123e27\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab4f364e3ec7f70e2276e192d8d4c2c6cc8e2822f4d7ac12f6198f935798bf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd7564933d473a51ceb1289e19cf7ea17049e9cbc134796f287686797157bac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hrq4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z" Oct 03 
08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.900326 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12e7158ae8abfde3e873066a70183f684e88d20db37ccd5a4e5251be59f1d27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.911178 4899 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.924620 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.936176 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.936219 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.936231 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.936248 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.936259 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:21Z","lastTransitionTime":"2025-10-03T08:41:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.936814 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.946143 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.956147 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.966358 4899 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-ldv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27fd79a9-c016-46aa-8b67-446a831eb2d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:41:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ldv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.976561 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9385ff69-ac63-4dac-b8ba-67bcfd9e9d41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2890c36988f29fbea57dfd712a1d66304622780d233996abe521a7a8bf0a932d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://306605d9d3b9065f7c546feb2478296ab23df5ee17f88e9732b78584614b2c3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54093b84153ff7894489025339a6977db0f46b4d2a3b49cc60077af28010d315\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b658507fbaf0ea4f98774e10f9bf7c34ad6dd9fffef18057d09c287f217091d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b658507fbaf0ea4f98774e10f9bf7c34ad6dd9fffef18057d09c287f217091d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.987561 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:21 crc kubenswrapper[4899]: I1003 08:41:21.999449 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.010294 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:22Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.039004 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.039046 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.039058 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.039076 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.039090 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:22Z","lastTransitionTime":"2025-10-03T08:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.039709 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a56dddb4754810c1205abc563d8c68f51b19c034
69eb832f543879d689844ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6106afab19aeebc1c86a8ad33545f5c5478f610867ac32326ddb9318c09534c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"message\\\":\\\"hift-ovn-kubernetes/ovnkube-node-g7f7c openshift-image-registry/node-ca-jpgn4 openshift-machine-config-operator/machine-config-daemon-t2h4g]\\\\nI1003 08:40:58.763384 6353 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1003 08:40:58.763387 6353 lb_config.go:1031] Cluster endpoints for openshift-operator-lifecycle-manager/olm-operator-metrics for network=default are: map[]\\\\nI1003 08:40:58.763399 6353 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-t2h4g\\\\nI1003 08:40:58.763410 6353 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-t2h4g\\\\nI1003 08:40:58.763418 6353 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-t2h4g in node crc\\\\nI1003 08:40:58.763410 6353 services_controller.go:443] Built service openshift-operator-lifecycle-manager/olm-operator-metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.168\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:8443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1003 08:40:58.763423 6353 obj\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a56dddb4754810c1205abc563d8c68f51b19c03469eb832f543879d689844ea4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:41:21Z\\\",\\\"message\\\":\\\"31 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1003 08:41:21.244108 6631 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z]\\\\nI1003 08:41:21.244093 6631 services_controller.go:444] Built service openshift-config-operator/metrics LB per-node configs for network=default: [\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:41:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:22Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.069126 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:22Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.087036 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:22Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.140791 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.140822 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.140832 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.140845 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.140857 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:22Z","lastTransitionTime":"2025-10-03T08:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.243505 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.243538 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.243547 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.243561 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.243571 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:22Z","lastTransitionTime":"2025-10-03T08:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.345713 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.345746 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.345755 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.345767 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.345776 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:22Z","lastTransitionTime":"2025-10-03T08:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.448347 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.448383 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.448391 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.448404 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.448413 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:22Z","lastTransitionTime":"2025-10-03T08:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.550565 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.550599 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.550608 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.550621 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.550632 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:22Z","lastTransitionTime":"2025-10-03T08:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.653268 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.653310 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.653323 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.653337 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.653348 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:22Z","lastTransitionTime":"2025-10-03T08:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.728424 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.728471 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.728482 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.728506 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.728519 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:22Z","lastTransitionTime":"2025-10-03T08:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:22 crc kubenswrapper[4899]: E1003 08:41:22.743098 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:22Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.747183 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.747225 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.747234 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.747249 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.747269 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:22Z","lastTransitionTime":"2025-10-03T08:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:22 crc kubenswrapper[4899]: E1003 08:41:22.759407 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:22Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.764341 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.764389 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.764406 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.764426 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.764438 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:22Z","lastTransitionTime":"2025-10-03T08:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:22 crc kubenswrapper[4899]: E1003 08:41:22.775412 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:22Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.779940 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.779994 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.780006 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.780027 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.780039 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:22Z","lastTransitionTime":"2025-10-03T08:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:22 crc kubenswrapper[4899]: E1003 08:41:22.794192 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:22Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.799073 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.799126 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.799144 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.799170 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.799188 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:22Z","lastTransitionTime":"2025-10-03T08:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:22 crc kubenswrapper[4899]: E1003 08:41:22.811673 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:22Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:22 crc kubenswrapper[4899]: E1003 08:41:22.811807 4899 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.814469 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.814535 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.814548 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.814567 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.814579 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:22Z","lastTransitionTime":"2025-10-03T08:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.855698 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f7c_764a7341-6f52-4fc1-9086-87b90aa126e8/ovnkube-controller/2.log" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.859249 4899 scope.go:117] "RemoveContainer" containerID="a56dddb4754810c1205abc563d8c68f51b19c03469eb832f543879d689844ea4" Oct 03 08:41:22 crc kubenswrapper[4899]: E1003 08:41:22.859406 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-g7f7c_openshift-ovn-kubernetes(764a7341-6f52-4fc1-9086-87b90aa126e8)\"" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.874365 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:22Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.886131 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:22Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.897189 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-03T08:41:22Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.913165 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"
recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257
453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a56dddb4754810c1205abc563d8c68f51b19c03469eb832f543879d689844ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a56dddb4754810c1205abc563d8c68f51b19c03469eb832f543879d689844ea4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:41:21Z\\\",\\\"message\\\":\\\"31 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1003 08:41:21.244108 6631 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z]\\\\nI1003 08:41:21.244093 6631 services_controller.go:444] Built service openshift-config-operator/metrics LB per-node configs for network=default: 
[\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:41:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-g7f7c_openshift-ovn-kubernetes(764a7341-6f52-4fc1-9086-87b90aa126e8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveRe
adOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:22Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.916465 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.916501 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.916511 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.916529 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.916542 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:22Z","lastTransitionTime":"2025-10-03T08:41:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.925740 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:22Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.934037 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:22Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.943244 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"59642124-309c-4d11-9965-c47f9b123e27\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab4f364e3ec7f70e2276e192d8d4c2c6cc8e2822f4d7ac12f6198f935798bf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd7564933d473a51ceb1289e19cf7ea17049e9cbc134796f287686797157bac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hrq4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:22Z is after 2025-08-24T17:21:41Z" Oct 03 
08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.953261 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:22Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.965293 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:22Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.980416 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12e7158ae8abfde3e873066a70183f684e88d20db37ccd5a4e5251be59f1d27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-03T08:41:22Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:22 crc kubenswrapper[4899]: I1003 08:41:22.992047 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9385ff69-ac63-4dac-b8ba-67bcfd9e9d41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2890c36988f29fbea57dfd712a1d66304622780d233996abe521a7a8bf0a932d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://306605d9d3b9065f7c546feb2478296ab23df5ee17f88e9732b78584614b2c3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54093b84153ff7894489025339a6977db0f46b4d2a3b49cc60077af28010d315\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b658507fbaf0ea4f98774e10f9bf7c34ad6dd9fffef18057d09c287f217091d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b658507fbaf0ea4f98774e10f9bf7c34ad6dd9fffef18057d09c287f217091d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:22Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.003282 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:23Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.014146 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:23Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.018301 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.018348 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.018358 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.018373 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.018386 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:23Z","lastTransitionTime":"2025-10-03T08:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.025509 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:23Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.034445 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:23Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.045152 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:23Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.056931 4899 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-ldv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27fd79a9-c016-46aa-8b67-446a831eb2d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:41:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ldv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:23Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.120808 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.120849 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 
08:41:23.120860 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.120879 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.120903 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:23Z","lastTransitionTime":"2025-10-03T08:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.223521 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.223571 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.223582 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.223596 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.223606 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:23Z","lastTransitionTime":"2025-10-03T08:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.326182 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.326256 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.326271 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.326296 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.326321 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:23Z","lastTransitionTime":"2025-10-03T08:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.428119 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.428164 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.428173 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.428186 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.428197 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:23Z","lastTransitionTime":"2025-10-03T08:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.526273 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.526312 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.526390 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:23 crc kubenswrapper[4899]: E1003 08:41:23.526459 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:23 crc kubenswrapper[4899]: E1003 08:41:23.526408 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.526296 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:23 crc kubenswrapper[4899]: E1003 08:41:23.526586 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:23 crc kubenswrapper[4899]: E1003 08:41:23.526613 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.531051 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.531097 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.531114 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.531131 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.531144 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:23Z","lastTransitionTime":"2025-10-03T08:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.633222 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.633256 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.633265 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.633278 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.633287 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:23Z","lastTransitionTime":"2025-10-03T08:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.735708 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.735758 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.735766 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.735780 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.735790 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:23Z","lastTransitionTime":"2025-10-03T08:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.837691 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.837733 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.837741 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.837755 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.837766 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:23Z","lastTransitionTime":"2025-10-03T08:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.939868 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.939916 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.939926 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.939939 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:23 crc kubenswrapper[4899]: I1003 08:41:23.939947 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:23Z","lastTransitionTime":"2025-10-03T08:41:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.041768 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.041821 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.041830 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.041845 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.041855 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:24Z","lastTransitionTime":"2025-10-03T08:41:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.144041 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.144123 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.144136 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.144154 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.144163 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:24Z","lastTransitionTime":"2025-10-03T08:41:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.246000 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.246065 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.246079 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.246094 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.246132 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:24Z","lastTransitionTime":"2025-10-03T08:41:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.348405 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.348436 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.348446 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.348460 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.348471 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:24Z","lastTransitionTime":"2025-10-03T08:41:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.450931 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.450976 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.450990 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.451007 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.451017 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:24Z","lastTransitionTime":"2025-10-03T08:41:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.552608 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.552642 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.552650 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.552665 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.552674 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:24Z","lastTransitionTime":"2025-10-03T08:41:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.655306 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.655344 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.655354 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.655370 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.655381 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:24Z","lastTransitionTime":"2025-10-03T08:41:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.757879 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.757943 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.757955 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.757971 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.757983 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:24Z","lastTransitionTime":"2025-10-03T08:41:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.861939 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.861986 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.862001 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.862020 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.862033 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:24Z","lastTransitionTime":"2025-10-03T08:41:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.964281 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.964322 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.964331 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.964365 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:24 crc kubenswrapper[4899]: I1003 08:41:24.964375 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:24Z","lastTransitionTime":"2025-10-03T08:41:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.067242 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.067308 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.067321 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.067345 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.067360 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:25Z","lastTransitionTime":"2025-10-03T08:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.169580 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.169808 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.169878 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.169995 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.170082 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:25Z","lastTransitionTime":"2025-10-03T08:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.271947 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.271994 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.272002 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.272018 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.272028 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:25Z","lastTransitionTime":"2025-10-03T08:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.374032 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.374067 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.374076 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.374089 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.374097 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:25Z","lastTransitionTime":"2025-10-03T08:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.476297 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.476341 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.476357 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.476372 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.476383 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:25Z","lastTransitionTime":"2025-10-03T08:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.526147 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:25 crc kubenswrapper[4899]: E1003 08:41:25.526264 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.526450 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.526542 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:25 crc kubenswrapper[4899]: E1003 08:41:25.526700 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:25 crc kubenswrapper[4899]: E1003 08:41:25.526781 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.526924 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:25 crc kubenswrapper[4899]: E1003 08:41:25.527142 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.578942 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.578990 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.579002 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.579017 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.579026 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:25Z","lastTransitionTime":"2025-10-03T08:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.682020 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.682304 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.682387 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.682482 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.682577 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:25Z","lastTransitionTime":"2025-10-03T08:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.785510 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.785546 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.785557 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.785571 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.785581 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:25Z","lastTransitionTime":"2025-10-03T08:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.888148 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.888196 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.888208 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.888225 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.888237 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:25Z","lastTransitionTime":"2025-10-03T08:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.990751 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.990798 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.990811 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.990831 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:25 crc kubenswrapper[4899]: I1003 08:41:25.990843 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:25Z","lastTransitionTime":"2025-10-03T08:41:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.092599 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.092638 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.092648 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.092669 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.092678 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:26Z","lastTransitionTime":"2025-10-03T08:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.195628 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.195670 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.195681 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.195697 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.195709 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:26Z","lastTransitionTime":"2025-10-03T08:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.298237 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.298267 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.298282 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.298299 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.298310 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:26Z","lastTransitionTime":"2025-10-03T08:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.400569 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.400607 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.400620 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.400636 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.400646 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:26Z","lastTransitionTime":"2025-10-03T08:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.503359 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.503611 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.503733 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.503838 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.503997 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:26Z","lastTransitionTime":"2025-10-03T08:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.539693 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:26Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.549013 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:26Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.559561 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"59642124-309c-4d11-9965-c47f9b123e27\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab4f364e3ec7f70e2276e192d8d4c2c6cc8e2822f4d7ac12f6198f935798bf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd7564933d473a51ceb1289e19cf7ea17049e9cbc134796f287686797157bac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hrq4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:26Z is after 2025-08-24T17:21:41Z" Oct 03 
08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.572724 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:26Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.585468 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:26Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.600220 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12e7158ae8abfde3e873066a70183f684e88d20db37ccd5a4e5251be59f1d27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-03T08:41:26Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.606478 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.606520 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.606531 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.606547 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.606558 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:26Z","lastTransitionTime":"2025-10-03T08:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.611905 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:
45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:26Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.622754 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"
podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:26Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.631748 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ldv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27fd79a9-c016-46aa-8b67-446a831eb2d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:41:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ldv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:26Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.640781 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9385ff69-ac63-4dac-b8ba-67bcfd9e9d41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2890c36988f29fbea57dfd712a1d66304622780d233996abe521a7a8bf0a932d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://306605d9d3b9065f7c546feb2478296ab23df5ee17f88e9732b78584614b2c3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54093b84153ff7894489025339a6977db0f46b4d2a3b49cc60077af28010d315\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b658507fbaf0ea4f98774e10f9bf7c34ad6dd9fffef18057d09c287f217091d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b658507fbaf0ea4f98774e10f9bf7c34ad6dd9fffef18057d09c287f217091d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:26Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.651409 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:26Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.663538 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:26Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.675529 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:26Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.691947 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a56dddb4754810c1205abc563d8c68f51b19c03469eb832f543879d689844ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a56dddb4754810c1205abc563d8c68f51b19c03469eb832f543879d689844ea4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:41:21Z\\\",\\\"message\\\":\\\"31 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1003 08:41:21.244108 6631 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z]\\\\nI1003 08:41:21.244093 6631 services_controller.go:444] Built service openshift-config-operator/metrics LB per-node configs for network=default: [\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:41:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-g7f7c_openshift-ovn-kubernetes(764a7341-6f52-4fc1-9086-87b90aa126e8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:26Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.703244 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:26Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.708554 4899 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.708589 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.708600 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.708613 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.708624 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:26Z","lastTransitionTime":"2025-10-03T08:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.714884 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:
40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:26Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.725543 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:26Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.810320 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.810358 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.810367 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.810381 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.810390 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:26Z","lastTransitionTime":"2025-10-03T08:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.913061 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.913088 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.913096 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.913111 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:26 crc kubenswrapper[4899]: I1003 08:41:26.913121 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:26Z","lastTransitionTime":"2025-10-03T08:41:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.015021 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.015063 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.015074 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.015090 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.015103 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:27Z","lastTransitionTime":"2025-10-03T08:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.117807 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.117871 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.117880 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.117909 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.117918 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:27Z","lastTransitionTime":"2025-10-03T08:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.223581 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.223638 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.223649 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.223668 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.223679 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:27Z","lastTransitionTime":"2025-10-03T08:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.326233 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.326264 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.326272 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.326286 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.326294 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:27Z","lastTransitionTime":"2025-10-03T08:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.427932 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.428216 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.428228 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.428245 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.428256 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:27Z","lastTransitionTime":"2025-10-03T08:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.526461 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.526495 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.526525 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:27 crc kubenswrapper[4899]: E1003 08:41:27.526581 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.526648 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:27 crc kubenswrapper[4899]: E1003 08:41:27.526671 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:27 crc kubenswrapper[4899]: E1003 08:41:27.526779 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:27 crc kubenswrapper[4899]: E1003 08:41:27.526886 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.530402 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.530425 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.530433 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.530446 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.530454 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:27Z","lastTransitionTime":"2025-10-03T08:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.632856 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.632921 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.632931 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.632958 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.632968 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:27Z","lastTransitionTime":"2025-10-03T08:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.736378 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.736422 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.736431 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.736446 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.736457 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:27Z","lastTransitionTime":"2025-10-03T08:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.839195 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.839266 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.839280 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.839297 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.839328 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:27Z","lastTransitionTime":"2025-10-03T08:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.942004 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.942088 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.942105 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.942119 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:27 crc kubenswrapper[4899]: I1003 08:41:27.942131 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:27Z","lastTransitionTime":"2025-10-03T08:41:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.045164 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.045200 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.045209 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.045225 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.045233 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:28Z","lastTransitionTime":"2025-10-03T08:41:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.147432 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.147460 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.147468 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.147480 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.147489 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:28Z","lastTransitionTime":"2025-10-03T08:41:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.249720 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.249753 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.249762 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.249776 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.249814 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:28Z","lastTransitionTime":"2025-10-03T08:41:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.352270 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.352314 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.352322 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.352338 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.352347 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:28Z","lastTransitionTime":"2025-10-03T08:41:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.454391 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.454432 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.454446 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.454462 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.454475 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:28Z","lastTransitionTime":"2025-10-03T08:41:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.556472 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.556540 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.556553 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.556568 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.556577 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:28Z","lastTransitionTime":"2025-10-03T08:41:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.658296 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.658357 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.658369 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.658385 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.658396 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:28Z","lastTransitionTime":"2025-10-03T08:41:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.761305 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.761370 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.761379 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.761394 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.761403 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:28Z","lastTransitionTime":"2025-10-03T08:41:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.865247 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.865307 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.865324 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.865350 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.865368 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:28Z","lastTransitionTime":"2025-10-03T08:41:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.968206 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.968236 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.968244 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.968257 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:28 crc kubenswrapper[4899]: I1003 08:41:28.968265 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:28Z","lastTransitionTime":"2025-10-03T08:41:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.071422 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.071523 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.071543 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.071605 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.071624 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:29Z","lastTransitionTime":"2025-10-03T08:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.180755 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.180794 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.180802 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.180817 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.180829 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:29Z","lastTransitionTime":"2025-10-03T08:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.284029 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.284075 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.284093 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.284119 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.284135 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:29Z","lastTransitionTime":"2025-10-03T08:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.386786 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.386824 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.386833 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.386849 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.386859 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:29Z","lastTransitionTime":"2025-10-03T08:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.489525 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.489554 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.489562 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.489576 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.489584 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:29Z","lastTransitionTime":"2025-10-03T08:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.526229 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:29 crc kubenswrapper[4899]: E1003 08:41:29.526440 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.526729 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:29 crc kubenswrapper[4899]: E1003 08:41:29.526834 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.527020 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:29 crc kubenswrapper[4899]: E1003 08:41:29.527107 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.527260 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:29 crc kubenswrapper[4899]: E1003 08:41:29.527339 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.592883 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.592949 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.592959 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.593000 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.593014 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:29Z","lastTransitionTime":"2025-10-03T08:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.696758 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.696822 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.696834 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.696860 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.696875 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:29Z","lastTransitionTime":"2025-10-03T08:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.799524 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.799584 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.799594 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.799622 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.799642 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:29Z","lastTransitionTime":"2025-10-03T08:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.902717 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.902776 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.902786 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.902808 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:29 crc kubenswrapper[4899]: I1003 08:41:29.902820 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:29Z","lastTransitionTime":"2025-10-03T08:41:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.005794 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.005841 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.005851 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.005868 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.005879 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:30Z","lastTransitionTime":"2025-10-03T08:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.108367 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.108420 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.108433 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.108456 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.108471 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:30Z","lastTransitionTime":"2025-10-03T08:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.212079 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.212116 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.212130 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.212150 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.212163 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:30Z","lastTransitionTime":"2025-10-03T08:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.314685 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.314727 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.314736 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.314748 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.314757 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:30Z","lastTransitionTime":"2025-10-03T08:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.417321 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.417403 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.417416 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.417433 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.417444 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:30Z","lastTransitionTime":"2025-10-03T08:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.519134 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.519175 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.519188 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.519204 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.519440 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:30Z","lastTransitionTime":"2025-10-03T08:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.621663 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.621695 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.621707 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.621720 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.621729 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:30Z","lastTransitionTime":"2025-10-03T08:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.724180 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.724240 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.724253 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.724276 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.724290 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:30Z","lastTransitionTime":"2025-10-03T08:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.826917 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.826958 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.826967 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.826981 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.826990 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:30Z","lastTransitionTime":"2025-10-03T08:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.929357 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.929394 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.929404 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.929422 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:30 crc kubenswrapper[4899]: I1003 08:41:30.929441 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:30Z","lastTransitionTime":"2025-10-03T08:41:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.031963 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.031994 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.032004 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.032018 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.032028 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:31Z","lastTransitionTime":"2025-10-03T08:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.134350 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.134394 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.134405 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.134429 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.134441 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:31Z","lastTransitionTime":"2025-10-03T08:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.236942 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.236987 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.236997 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.237015 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.237028 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:31Z","lastTransitionTime":"2025-10-03T08:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.340390 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.340454 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.340467 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.340490 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.340528 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:31Z","lastTransitionTime":"2025-10-03T08:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.443459 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.443491 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.443499 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.443511 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.443520 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:31Z","lastTransitionTime":"2025-10-03T08:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.526317 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:31 crc kubenswrapper[4899]: E1003 08:41:31.526499 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.526591 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.526741 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:31 crc kubenswrapper[4899]: E1003 08:41:31.526742 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.526800 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:31 crc kubenswrapper[4899]: E1003 08:41:31.526871 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:31 crc kubenswrapper[4899]: E1003 08:41:31.526955 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.546182 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.546229 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.546240 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.546258 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.546269 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:31Z","lastTransitionTime":"2025-10-03T08:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.648551 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.648593 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.648604 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.648619 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.648630 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:31Z","lastTransitionTime":"2025-10-03T08:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.752002 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.752060 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.752072 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.752092 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.752105 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:31Z","lastTransitionTime":"2025-10-03T08:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.854246 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.854299 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.854308 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.854329 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.854342 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:31Z","lastTransitionTime":"2025-10-03T08:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.956643 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.956677 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.956690 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.956705 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:31 crc kubenswrapper[4899]: I1003 08:41:31.956715 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:31Z","lastTransitionTime":"2025-10-03T08:41:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.019010 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/27fd79a9-c016-46aa-8b67-446a831eb2d8-metrics-certs\") pod \"network-metrics-daemon-ldv5d\" (UID: \"27fd79a9-c016-46aa-8b67-446a831eb2d8\") " pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:32 crc kubenswrapper[4899]: E1003 08:41:32.019156 4899 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 08:41:32 crc kubenswrapper[4899]: E1003 08:41:32.019200 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/27fd79a9-c016-46aa-8b67-446a831eb2d8-metrics-certs podName:27fd79a9-c016-46aa-8b67-446a831eb2d8 nodeName:}" failed. No retries permitted until 2025-10-03 08:42:04.019187869 +0000 UTC m=+98.126672812 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/27fd79a9-c016-46aa-8b67-446a831eb2d8-metrics-certs") pod "network-metrics-daemon-ldv5d" (UID: "27fd79a9-c016-46aa-8b67-446a831eb2d8") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.059129 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.059175 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.059190 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.059208 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.059220 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:32Z","lastTransitionTime":"2025-10-03T08:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.161836 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.161906 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.161917 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.161933 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.161944 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:32Z","lastTransitionTime":"2025-10-03T08:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.263940 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.263975 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.263986 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.264000 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.264012 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:32Z","lastTransitionTime":"2025-10-03T08:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.365663 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.365701 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.365712 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.365727 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.365739 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:32Z","lastTransitionTime":"2025-10-03T08:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.467703 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.467748 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.467757 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.467771 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.467780 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:32Z","lastTransitionTime":"2025-10-03T08:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.570077 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.570118 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.570142 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.570158 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.570167 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:32Z","lastTransitionTime":"2025-10-03T08:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.671783 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.671815 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.671825 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.671837 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.671845 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:32Z","lastTransitionTime":"2025-10-03T08:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.774279 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.774326 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.774364 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.774391 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.774406 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:32Z","lastTransitionTime":"2025-10-03T08:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.876804 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.876848 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.876858 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.876874 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.876885 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:32Z","lastTransitionTime":"2025-10-03T08:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.961330 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.961374 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.961386 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.961403 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.961415 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:32Z","lastTransitionTime":"2025-10-03T08:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:32 crc kubenswrapper[4899]: E1003 08:41:32.973721 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:32Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.976635 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.976690 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.976698 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.976713 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.976723 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:32Z","lastTransitionTime":"2025-10-03T08:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:32 crc kubenswrapper[4899]: E1003 08:41:32.987320 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:32Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.990376 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.990451 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.990483 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.990501 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:32 crc kubenswrapper[4899]: I1003 08:41:32.990510 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:32Z","lastTransitionTime":"2025-10-03T08:41:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:33 crc kubenswrapper[4899]: E1003 08:41:33.001193 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:32Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.004390 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.004471 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.004485 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.004500 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.004509 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:33Z","lastTransitionTime":"2025-10-03T08:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:33 crc kubenswrapper[4899]: E1003 08:41:33.016661 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:33Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.020184 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.020234 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.020254 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.020274 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.020285 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:33Z","lastTransitionTime":"2025-10-03T08:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:33 crc kubenswrapper[4899]: E1003 08:41:33.032534 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:33Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:33 crc kubenswrapper[4899]: E1003 08:41:33.032649 4899 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.034061 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.034109 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.034121 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.034138 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.034149 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:33Z","lastTransitionTime":"2025-10-03T08:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.136375 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.136411 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.136419 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.136435 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.136444 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:33Z","lastTransitionTime":"2025-10-03T08:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.238832 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.238877 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.238886 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.238916 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.238928 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:33Z","lastTransitionTime":"2025-10-03T08:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.341563 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.341599 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.341609 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.341625 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.341637 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:33Z","lastTransitionTime":"2025-10-03T08:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.443287 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.443327 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.443339 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.443355 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.443364 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:33Z","lastTransitionTime":"2025-10-03T08:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.526257 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.526286 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.526297 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.526280 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:33 crc kubenswrapper[4899]: E1003 08:41:33.526372 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:33 crc kubenswrapper[4899]: E1003 08:41:33.526526 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:33 crc kubenswrapper[4899]: E1003 08:41:33.526588 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:33 crc kubenswrapper[4899]: E1003 08:41:33.526618 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.546163 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.546199 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.546207 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.546219 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.546228 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:33Z","lastTransitionTime":"2025-10-03T08:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.648014 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.648043 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.648054 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.648070 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.648084 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:33Z","lastTransitionTime":"2025-10-03T08:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.750618 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.750660 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.750671 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.750687 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.750699 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:33Z","lastTransitionTime":"2025-10-03T08:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.852752 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.852825 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.852839 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.852855 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.852867 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:33Z","lastTransitionTime":"2025-10-03T08:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.886701 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pgdhq_6f75d8d8-3b12-42bf-b447-0afb4413fd54/kube-multus/0.log" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.886961 4899 generic.go:334] "Generic (PLEG): container finished" podID="6f75d8d8-3b12-42bf-b447-0afb4413fd54" containerID="f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f" exitCode=1 Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.887065 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pgdhq" event={"ID":"6f75d8d8-3b12-42bf-b447-0afb4413fd54","Type":"ContainerDied","Data":"f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f"} Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.887558 4899 scope.go:117] "RemoveContainer" containerID="f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.904632 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/en
v\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:33Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.914279 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:33Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.924763 4899 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59642124-309c-4d11-9965-c47f9b123e27\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab4f364e3ec7f70e2276e192d8d4c2c6cc8e2822f4d7ac12f6198f935798bf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd7564933d473a51ceb1289e19cf7ea17049e9cbc134796f287686797157bac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hrq4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:33Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.938153 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12e7158ae8abfde3e873066a70183f684e88d20db37ccd5a4e5251be59f1d27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"c
ontainerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:33Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:33 crc kubenswrapper[4899]: 
I1003 08:41:33.953527 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:33Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.954720 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.954753 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.954762 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.954776 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.954786 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:33Z","lastTransitionTime":"2025-10-03T08:41:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.966218 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:41:33Z\\\",\\\"message\\\":\\\"2025-10-03T08:40:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_49c5c875-7012-4f94-a45d-72a046fb0f24\\\\n2025-10-03T08:40:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_49c5c875-7012-4f94-a45d-72a046fb0f24 to /host/opt/cni/bin/\\\\n2025-10-03T08:40:48Z [verbose] multus-daemon started\\\\n2025-10-03T08:40:48Z [verbose] Readiness Indicator file check\\\\n2025-10-03T08:41:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:33Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.978575 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:33Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:33 crc kubenswrapper[4899]: I1003 08:41:33.990113 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:33Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.001824 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:33Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.012967 4899 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-ldv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27fd79a9-c016-46aa-8b67-446a831eb2d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:41:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ldv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:34Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.025864 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9385ff69-ac63-4dac-b8ba-67bcfd9e9d41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2890c36988f29fbea57dfd712a1d66304622780d233996abe521a7a8bf0a932d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://306605d9d3b9065f7c546feb2478296ab23df5ee17f88e9732b78584614b2c3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54093b84153ff7894489025339a6977db0f46b4d2a3b49cc60077af28010d315\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b658507fbaf0ea4f98774e10f9bf7c34ad6dd9fffef18057d09c287f217091d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b658507fbaf0ea4f98774e10f9bf7c34ad6dd9fffef18057d09c287f217091d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:34Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.039865 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:34Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.051396 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:34Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.056457 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.056498 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.056512 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.056528 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.056540 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:34Z","lastTransitionTime":"2025-10-03T08:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.063313 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:34Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.084358 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a56dddb4754810c1205abc563d8c68f51b19c03469eb832f543879d689844ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a56dddb4754810c1205abc563d8c68f51b19c03469eb832f543879d689844ea4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:41:21Z\\\",\\\"message\\\":\\\"31 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1003 08:41:21.244108 6631 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z]\\\\nI1003 08:41:21.244093 6631 services_controller.go:444] Built service openshift-config-operator/metrics LB per-node configs for network=default: [\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:41:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-g7f7c_openshift-ovn-kubernetes(764a7341-6f52-4fc1-9086-87b90aa126e8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:34Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.097170 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:34Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.109291 4899 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd
13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:34Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.158198 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.158235 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.158247 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.158262 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.158270 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:34Z","lastTransitionTime":"2025-10-03T08:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.261050 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.261105 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.261117 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.261133 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.261144 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:34Z","lastTransitionTime":"2025-10-03T08:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.363641 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.363671 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.363679 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.363692 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.363700 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:34Z","lastTransitionTime":"2025-10-03T08:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.466109 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.466170 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.466183 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.466196 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.466204 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:34Z","lastTransitionTime":"2025-10-03T08:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.568983 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.569026 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.569036 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.569050 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.569059 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:34Z","lastTransitionTime":"2025-10-03T08:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.671175 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.671246 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.671260 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.671276 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.671285 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:34Z","lastTransitionTime":"2025-10-03T08:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.773471 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.773520 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.773534 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.773553 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.773565 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:34Z","lastTransitionTime":"2025-10-03T08:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.875766 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.875804 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.875815 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.875831 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.875841 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:34Z","lastTransitionTime":"2025-10-03T08:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.892429 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pgdhq_6f75d8d8-3b12-42bf-b447-0afb4413fd54/kube-multus/0.log" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.892487 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pgdhq" event={"ID":"6f75d8d8-3b12-42bf-b447-0afb4413fd54","Type":"ContainerStarted","Data":"b2f7940cc405b005b23f102def919753d7820ecc4e45db9f25cb87611611f4dc"} Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.907420 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:34Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.918177 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:34Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.929051 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:34Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.939824 4899 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-ldv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27fd79a9-c016-46aa-8b67-446a831eb2d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:41:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ldv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:34Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.951651 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9385ff69-ac63-4dac-b8ba-67bcfd9e9d41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2890c36988f29fbea57dfd712a1d66304622780d233996abe521a7a8bf0a932d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://306605d9d3b9065f7c546feb2478296ab23df5ee17f88e9732b78584614b2c3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54093b84153ff7894489025339a6977db0f46b4d2a3b49cc60077af28010d315\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b658507fbaf0ea4f98774e10f9bf7c34ad6dd9fffef18057d09c287f217091d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b658507fbaf0ea4f98774e10f9bf7c34ad6dd9fffef18057d09c287f217091d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:34Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.963748 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:34Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.974735 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:34Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.978201 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.978239 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.978249 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.978264 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.978275 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:34Z","lastTransitionTime":"2025-10-03T08:41:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:34 crc kubenswrapper[4899]: I1003 08:41:34.989318 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:34Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.007998 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a56dddb4754810c1205abc563d8c68f51b19c03469eb832f543879d689844ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a56dddb4754810c1205abc563d8c68f51b19c03469eb832f543879d689844ea4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:41:21Z\\\",\\\"message\\\":\\\"31 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1003 08:41:21.244108 6631 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z]\\\\nI1003 08:41:21.244093 6631 services_controller.go:444] Built service openshift-config-operator/metrics LB per-node configs for network=default: [\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:41:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-g7f7c_openshift-ovn-kubernetes(764a7341-6f52-4fc1-9086-87b90aa126e8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:35Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.024420 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:35Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.035496 4899 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd
13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:35Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.046643 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\
\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:35Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.056002 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for 
pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:35Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.067864 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59642124-309c-4d11-9965-c47f9b123e27\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab4f364e3ec7f70e2276e192d8d4c2c6cc8e2822f4d7ac12f6198f935798bf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd7564933d473a51ceb1289e19cf7ea17049e9cbc134796f287686797157bac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168
.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hrq4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:35Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.080907 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.080955 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.080973 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.080991 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.081003 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:35Z","lastTransitionTime":"2025-10-03T08:41:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.083285 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12e7158ae8abfde3e873066a70183f684e88d20db37ccd5a4e5251be59f1d27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:35Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.095967 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:35Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.111171 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2f7940cc405b005b23f102def919753d7820ecc4e45db9f25cb87611611f4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:41:33Z\\\",\\\"message\\\":\\\"2025-10-03T08:40:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_49c5c875-7012-4f94-a45d-72a046fb0f24\\\\n2025-10-03T08:40:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_49c5c875-7012-4f94-a45d-72a046fb0f24 to /host/opt/cni/bin/\\\\n2025-10-03T08:40:48Z [verbose] multus-daemon started\\\\n2025-10-03T08:40:48Z [verbose] Readiness Indicator file check\\\\n2025-10-03T08:41:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:41:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:35Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.182761 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.182806 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.182817 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.182836 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.182849 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:35Z","lastTransitionTime":"2025-10-03T08:41:35Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.284834 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.284871 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.284882 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.284917 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.284928 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:35Z","lastTransitionTime":"2025-10-03T08:41:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.386725 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.386766 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.386776 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.386795 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.386807 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:35Z","lastTransitionTime":"2025-10-03T08:41:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.489317 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.489382 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.489394 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.489413 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.489429 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:35Z","lastTransitionTime":"2025-10-03T08:41:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.526505 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.526513 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.526577 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.526577 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:35 crc kubenswrapper[4899]: E1003 08:41:35.526952 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:35 crc kubenswrapper[4899]: E1003 08:41:35.527070 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:35 crc kubenswrapper[4899]: E1003 08:41:35.527147 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:35 crc kubenswrapper[4899]: E1003 08:41:35.527201 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.527330 4899 scope.go:117] "RemoveContainer" containerID="a56dddb4754810c1205abc563d8c68f51b19c03469eb832f543879d689844ea4" Oct 03 08:41:35 crc kubenswrapper[4899]: E1003 08:41:35.527509 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-g7f7c_openshift-ovn-kubernetes(764a7341-6f52-4fc1-9086-87b90aa126e8)\"" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.591974 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.592014 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.592029 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.592042 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.592053 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:35Z","lastTransitionTime":"2025-10-03T08:41:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.693883 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.693942 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.693955 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.693974 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.693988 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:35Z","lastTransitionTime":"2025-10-03T08:41:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.796329 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.796381 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.796393 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.796409 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.796421 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:35Z","lastTransitionTime":"2025-10-03T08:41:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.898401 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.898428 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.898440 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.898454 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:35 crc kubenswrapper[4899]: I1003 08:41:35.898463 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:35Z","lastTransitionTime":"2025-10-03T08:41:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.001418 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.001485 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.001497 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.001513 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.001526 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:36Z","lastTransitionTime":"2025-10-03T08:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.103871 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.104179 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.104275 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.104365 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.104430 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:36Z","lastTransitionTime":"2025-10-03T08:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.207271 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.207524 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.207625 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.207721 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.207803 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:36Z","lastTransitionTime":"2025-10-03T08:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.310723 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.310791 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.310807 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.310832 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.310852 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:36Z","lastTransitionTime":"2025-10-03T08:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.412800 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.412835 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.412843 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.412863 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.412875 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:36Z","lastTransitionTime":"2025-10-03T08:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.514624 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.514654 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.514682 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.514695 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.514704 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:36Z","lastTransitionTime":"2025-10-03T08:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.540873 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:36Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.555271 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:36Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.568667 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-03T08:41:36Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.589344 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"
recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257
453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a56dddb4754810c1205abc563d8c68f51b19c03469eb832f543879d689844ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a56dddb4754810c1205abc563d8c68f51b19c03469eb832f543879d689844ea4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:41:21Z\\\",\\\"message\\\":\\\"31 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1003 08:41:21.244108 6631 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z]\\\\nI1003 08:41:21.244093 6631 services_controller.go:444] Built service openshift-config-operator/metrics LB per-node configs for network=default: 
[\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:41:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-g7f7c_openshift-ovn-kubernetes(764a7341-6f52-4fc1-9086-87b90aa126e8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveRe
adOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:36Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.601728 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:36Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.614729 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"59642124-309c-4d11-9965-c47f9b123e27\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab4f364e3ec7f70e2276e192d8d4c2c6cc8e2822f4d7ac12f6198f935798bf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd7564933d473a51ceb1289e19cf7ea17049e9cbc134796f287686797157bac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hrq4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:36Z is after 2025-08-24T17:21:41Z" Oct 03 
08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.616191 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.616246 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.616257 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.616367 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.616385 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:36Z","lastTransitionTime":"2025-10-03T08:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.629659 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"ru
nning\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:36Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.641836 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:36Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.655762 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2f7940cc405b005b23f102def919753d7820ecc4e45db9f25cb87611611f4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:41:33Z\\\",\\\"message\\\":\\\"2025-10-03T08:40:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_49c5c875-7012-4f94-a45d-72a046fb0f24\\\\n2025-10-03T08:40:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_49c5c875-7012-4f94-a45d-72a046fb0f24 to /host/opt/cni/bin/\\\\n2025-10-03T08:40:48Z [verbose] multus-daemon started\\\\n2025-10-03T08:40:48Z [verbose] Readiness Indicator file check\\\\n2025-10-03T08:41:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:41:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:36Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.671215 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12e7158ae8abfde3e873066a70183f684e88d20db37ccd5a4e5251be59f1d27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:36Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.683168 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:36Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.694458 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:36Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.705635 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:36Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.715530 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:36Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.719452 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.719487 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.719496 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.719513 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.719523 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:36Z","lastTransitionTime":"2025-10-03T08:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.727402 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\
\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:36Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.737662 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ldv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27fd79a9-c016-46aa-8b67-446a831eb2d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:41:00Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-ldv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:36Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.750373 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9385ff69-ac63-4dac-b8ba-67bcfd9e9d41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2890c36988f29fbea57dfd712a1d66304622780d233996abe521a7a8bf0a932d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://306605d9d3b9065f7c546feb2478296ab23df5ee17f88e9732b78584614b2c3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54093b84153ff7894489025339a6977db0f46b4d2a3b49cc60077af28010d315\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"read
y\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b658507fbaf0ea4f98774e10f9bf7c34ad6dd9fffef18057d09c287f217091d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b658507fbaf0ea4f98774e10f9bf7c34ad6dd9fffef18057d09c287f217091d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:36Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.822619 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.822647 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.822655 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.822669 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.822678 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:36Z","lastTransitionTime":"2025-10-03T08:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.924469 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.924511 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.924523 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.924543 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:36 crc kubenswrapper[4899]: I1003 08:41:36.924555 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:36Z","lastTransitionTime":"2025-10-03T08:41:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.026596 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.026648 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.026661 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.026678 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.026718 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:37Z","lastTransitionTime":"2025-10-03T08:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.128605 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.128677 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.128687 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.128700 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.128709 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:37Z","lastTransitionTime":"2025-10-03T08:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.230634 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.230690 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.230705 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.230724 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.230737 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:37Z","lastTransitionTime":"2025-10-03T08:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.332661 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.332701 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.332709 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.332723 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.332733 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:37Z","lastTransitionTime":"2025-10-03T08:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.434588 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.434627 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.434636 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.434650 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.434660 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:37Z","lastTransitionTime":"2025-10-03T08:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.526918 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.526980 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.526962 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.526939 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:37 crc kubenswrapper[4899]: E1003 08:41:37.527062 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:37 crc kubenswrapper[4899]: E1003 08:41:37.527158 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:37 crc kubenswrapper[4899]: E1003 08:41:37.527262 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:37 crc kubenswrapper[4899]: E1003 08:41:37.527406 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.536484 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.536531 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.536545 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.536563 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.536576 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:37Z","lastTransitionTime":"2025-10-03T08:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.638590 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.638630 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.638643 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.638659 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.638673 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:37Z","lastTransitionTime":"2025-10-03T08:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.740642 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.740669 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.740679 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.740691 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.740699 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:37Z","lastTransitionTime":"2025-10-03T08:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.842565 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.842602 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.842611 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.842626 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.842639 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:37Z","lastTransitionTime":"2025-10-03T08:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.944833 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.944863 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.944872 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.944885 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:37 crc kubenswrapper[4899]: I1003 08:41:37.944913 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:37Z","lastTransitionTime":"2025-10-03T08:41:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.046908 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.046956 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.046968 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.046987 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.046999 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:38Z","lastTransitionTime":"2025-10-03T08:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.149480 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.149517 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.149530 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.149547 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.149556 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:38Z","lastTransitionTime":"2025-10-03T08:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.251722 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.251764 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.251777 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.251793 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.251802 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:38Z","lastTransitionTime":"2025-10-03T08:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.353886 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.353936 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.353944 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.353958 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.353967 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:38Z","lastTransitionTime":"2025-10-03T08:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.456379 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.456444 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.456458 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.456491 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.456504 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:38Z","lastTransitionTime":"2025-10-03T08:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.559271 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.559304 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.559312 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.559327 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.559337 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:38Z","lastTransitionTime":"2025-10-03T08:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.661675 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.661703 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.661711 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.661724 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.661734 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:38Z","lastTransitionTime":"2025-10-03T08:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.763735 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.763786 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.763797 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.763815 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.763832 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:38Z","lastTransitionTime":"2025-10-03T08:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.865801 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.865830 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.865841 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.865858 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.865871 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:38Z","lastTransitionTime":"2025-10-03T08:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.968306 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.968351 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.968362 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.968377 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:38 crc kubenswrapper[4899]: I1003 08:41:38.968390 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:38Z","lastTransitionTime":"2025-10-03T08:41:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.071084 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.071140 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.071153 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.071185 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.071199 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:39Z","lastTransitionTime":"2025-10-03T08:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.174118 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.174154 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.174163 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.174176 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.174185 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:39Z","lastTransitionTime":"2025-10-03T08:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.276589 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.276648 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.276657 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.276674 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.276685 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:39Z","lastTransitionTime":"2025-10-03T08:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.379371 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.379402 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.379411 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.379445 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.379456 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:39Z","lastTransitionTime":"2025-10-03T08:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.482631 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.482712 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.482729 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.482757 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.482774 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:39Z","lastTransitionTime":"2025-10-03T08:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.526592 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.526655 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:39 crc kubenswrapper[4899]: E1003 08:41:39.526703 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.526756 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:39 crc kubenswrapper[4899]: E1003 08:41:39.526799 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.526816 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:39 crc kubenswrapper[4899]: E1003 08:41:39.526933 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:39 crc kubenswrapper[4899]: E1003 08:41:39.527135 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.586034 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.586089 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.586107 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.586129 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.586145 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:39Z","lastTransitionTime":"2025-10-03T08:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.689294 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.689340 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.689351 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.689367 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.689380 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:39Z","lastTransitionTime":"2025-10-03T08:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.792686 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.792756 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.792773 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.792801 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.792820 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:39Z","lastTransitionTime":"2025-10-03T08:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.895804 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.895856 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.895869 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.895908 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:39 crc kubenswrapper[4899]: I1003 08:41:39.895931 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:39Z","lastTransitionTime":"2025-10-03T08:41:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:39.999991 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.000070 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.000084 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.000110 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.000129 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:40Z","lastTransitionTime":"2025-10-03T08:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.102709 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.102767 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.102777 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.102800 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.102812 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:40Z","lastTransitionTime":"2025-10-03T08:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.204987 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.205043 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.205053 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.205068 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.205079 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:40Z","lastTransitionTime":"2025-10-03T08:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.307225 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.307266 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.307277 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.307292 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.307302 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:40Z","lastTransitionTime":"2025-10-03T08:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.409162 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.409203 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.409215 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.409232 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.409244 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:40Z","lastTransitionTime":"2025-10-03T08:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.511666 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.511752 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.511764 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.511785 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.511798 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:40Z","lastTransitionTime":"2025-10-03T08:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.614282 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.615075 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.615101 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.615121 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.615132 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:40Z","lastTransitionTime":"2025-10-03T08:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.717617 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.717667 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.717687 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.717704 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.717716 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:40Z","lastTransitionTime":"2025-10-03T08:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.819939 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.820003 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.820016 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.820052 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.820065 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:40Z","lastTransitionTime":"2025-10-03T08:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.921819 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.921863 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.921872 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.921900 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:40 crc kubenswrapper[4899]: I1003 08:41:40.921911 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:40Z","lastTransitionTime":"2025-10-03T08:41:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.024754 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.024790 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.024798 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.024815 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.024825 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:41Z","lastTransitionTime":"2025-10-03T08:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.127109 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.127162 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.127178 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.127194 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.127204 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:41Z","lastTransitionTime":"2025-10-03T08:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.229493 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.229524 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.229532 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.229546 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.229554 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:41Z","lastTransitionTime":"2025-10-03T08:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.332580 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.332623 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.332632 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.332654 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.332665 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:41Z","lastTransitionTime":"2025-10-03T08:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.435525 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.435570 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.435579 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.435594 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.435603 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:41Z","lastTransitionTime":"2025-10-03T08:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.526597 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.526639 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.526639 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.526664 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:41 crc kubenswrapper[4899]: E1003 08:41:41.526786 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:41 crc kubenswrapper[4899]: E1003 08:41:41.526954 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:41 crc kubenswrapper[4899]: E1003 08:41:41.527011 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:41 crc kubenswrapper[4899]: E1003 08:41:41.527050 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.538609 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.538660 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.538676 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.538697 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.538711 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:41Z","lastTransitionTime":"2025-10-03T08:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.641709 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.641784 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.641797 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.641821 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.641838 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:41Z","lastTransitionTime":"2025-10-03T08:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.744794 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.744918 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.744930 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.744946 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.744956 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:41Z","lastTransitionTime":"2025-10-03T08:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.848155 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.848208 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.848216 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.848232 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.848244 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:41Z","lastTransitionTime":"2025-10-03T08:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.950690 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.950738 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.950748 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.950765 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:41 crc kubenswrapper[4899]: I1003 08:41:41.950777 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:41Z","lastTransitionTime":"2025-10-03T08:41:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.052760 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.052807 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.052818 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.052835 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.052845 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:42Z","lastTransitionTime":"2025-10-03T08:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.155067 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.155105 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.155118 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.155136 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.155148 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:42Z","lastTransitionTime":"2025-10-03T08:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.258184 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.258235 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.258346 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.258366 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.258379 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:42Z","lastTransitionTime":"2025-10-03T08:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.361488 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.361574 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.361589 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.361615 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.361632 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:42Z","lastTransitionTime":"2025-10-03T08:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.464214 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.464255 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.464266 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.464282 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.464292 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:42Z","lastTransitionTime":"2025-10-03T08:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.566427 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.566492 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.566505 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.566522 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.566536 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:42Z","lastTransitionTime":"2025-10-03T08:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.668883 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.669109 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.669291 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.669436 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.669573 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:42Z","lastTransitionTime":"2025-10-03T08:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.771847 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.771883 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.771910 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.771929 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.771940 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:42Z","lastTransitionTime":"2025-10-03T08:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.874984 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.875332 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.875345 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.875362 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.875372 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:42Z","lastTransitionTime":"2025-10-03T08:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.977736 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.977786 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.977799 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.977815 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:42 crc kubenswrapper[4899]: I1003 08:41:42.977827 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:42Z","lastTransitionTime":"2025-10-03T08:41:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.079711 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.079743 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.079754 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.079769 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.079823 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:43Z","lastTransitionTime":"2025-10-03T08:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.141511 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.141548 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.141557 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.141568 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.141576 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:43Z","lastTransitionTime":"2025-10-03T08:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:43 crc kubenswrapper[4899]: E1003 08:41:43.153407 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:43Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.156942 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.156990 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.157005 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.157024 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.157037 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:43Z","lastTransitionTime":"2025-10-03T08:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:43 crc kubenswrapper[4899]: E1003 08:41:43.167875 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:43Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.175506 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.175541 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.175550 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.175563 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.175571 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:43Z","lastTransitionTime":"2025-10-03T08:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:43 crc kubenswrapper[4899]: E1003 08:41:43.187542 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:43Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.191127 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.191179 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.191194 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.191211 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.191223 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:43Z","lastTransitionTime":"2025-10-03T08:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:43 crc kubenswrapper[4899]: E1003 08:41:43.201980 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:43Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.205264 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.205301 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.205310 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.205325 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.205336 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:43Z","lastTransitionTime":"2025-10-03T08:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:43 crc kubenswrapper[4899]: E1003 08:41:43.216951 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:43Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:43 crc kubenswrapper[4899]: E1003 08:41:43.217067 4899 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.218403 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.218501 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.218513 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.218526 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.218550 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:43Z","lastTransitionTime":"2025-10-03T08:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.321102 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.321144 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.321155 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.321170 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.321180 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:43Z","lastTransitionTime":"2025-10-03T08:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.423212 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.423244 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.423252 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.423265 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.423310 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:43Z","lastTransitionTime":"2025-10-03T08:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.525771 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.525804 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.525815 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.525815 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.525830 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.525841 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:43Z","lastTransitionTime":"2025-10-03T08:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.525919 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:43 crc kubenswrapper[4899]: E1003 08:41:43.526633 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.526660 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:43 crc kubenswrapper[4899]: E1003 08:41:43.526827 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.529183 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:43 crc kubenswrapper[4899]: E1003 08:41:43.529326 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:43 crc kubenswrapper[4899]: E1003 08:41:43.529493 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.628290 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.628373 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.628390 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.628406 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.628417 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:43Z","lastTransitionTime":"2025-10-03T08:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.730610 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.730646 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.730654 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.730668 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.730677 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:43Z","lastTransitionTime":"2025-10-03T08:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.833010 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.833053 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.833061 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.833074 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.833083 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:43Z","lastTransitionTime":"2025-10-03T08:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.935059 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.935111 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.935120 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.935133 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:43 crc kubenswrapper[4899]: I1003 08:41:43.935142 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:43Z","lastTransitionTime":"2025-10-03T08:41:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.037471 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.037509 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.037520 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.037535 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.037546 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:44Z","lastTransitionTime":"2025-10-03T08:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.139952 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.139988 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.139997 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.140011 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.140021 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:44Z","lastTransitionTime":"2025-10-03T08:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.242356 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.242404 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.242416 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.242435 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.242445 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:44Z","lastTransitionTime":"2025-10-03T08:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.344926 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.344975 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.344987 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.345002 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.345014 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:44Z","lastTransitionTime":"2025-10-03T08:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.447519 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.447567 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.447580 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.447596 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.447607 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:44Z","lastTransitionTime":"2025-10-03T08:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.549400 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.549441 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.549453 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.549468 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.549486 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:44Z","lastTransitionTime":"2025-10-03T08:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.652049 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.652092 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.652099 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.652115 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.652123 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:44Z","lastTransitionTime":"2025-10-03T08:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.753873 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.753926 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.753938 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.753954 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.753964 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:44Z","lastTransitionTime":"2025-10-03T08:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.856436 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.856487 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.856495 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.856514 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.856528 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:44Z","lastTransitionTime":"2025-10-03T08:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.959016 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.959079 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.959090 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.959129 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:44 crc kubenswrapper[4899]: I1003 08:41:44.959141 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:44Z","lastTransitionTime":"2025-10-03T08:41:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.061762 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.061806 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.061817 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.061833 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.061846 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:45Z","lastTransitionTime":"2025-10-03T08:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.164286 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.164327 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.164342 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.164357 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.164369 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:45Z","lastTransitionTime":"2025-10-03T08:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.266914 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.266949 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.266960 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.266973 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.266982 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:45Z","lastTransitionTime":"2025-10-03T08:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.369088 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.369137 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.369145 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.369160 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.369169 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:45Z","lastTransitionTime":"2025-10-03T08:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.471934 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.471989 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.472001 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.472019 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.472034 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:45Z","lastTransitionTime":"2025-10-03T08:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.526410 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.526472 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:45 crc kubenswrapper[4899]: E1003 08:41:45.526537 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.526472 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.526472 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:45 crc kubenswrapper[4899]: E1003 08:41:45.526609 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:45 crc kubenswrapper[4899]: E1003 08:41:45.526678 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:45 crc kubenswrapper[4899]: E1003 08:41:45.526738 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.573868 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.573918 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.573926 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.573940 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.573949 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:45Z","lastTransitionTime":"2025-10-03T08:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.676317 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.676356 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.676364 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.676378 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.676391 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:45Z","lastTransitionTime":"2025-10-03T08:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.778442 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.778481 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.778492 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.778512 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.778526 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:45Z","lastTransitionTime":"2025-10-03T08:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.880745 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.880781 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.880790 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.880802 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.880811 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:45Z","lastTransitionTime":"2025-10-03T08:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.982476 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.982529 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.982538 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.982552 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:45 crc kubenswrapper[4899]: I1003 08:41:45.982561 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:45Z","lastTransitionTime":"2025-10-03T08:41:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.085494 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.085531 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.085542 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.085557 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.085568 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:46Z","lastTransitionTime":"2025-10-03T08:41:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.188462 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.188524 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.188533 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.188548 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.188557 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:46Z","lastTransitionTime":"2025-10-03T08:41:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.291394 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.291468 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.291490 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.291519 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.291537 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:46Z","lastTransitionTime":"2025-10-03T08:41:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.394753 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.394803 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.394823 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.394845 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.394861 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:46Z","lastTransitionTime":"2025-10-03T08:41:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.498001 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.498062 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.498081 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.498105 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.498131 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:46Z","lastTransitionTime":"2025-10-03T08:41:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.547268 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59642124-309c-4d11-9965-c47f9b123e27\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab4f364e3ec7f70e2276e192d8d4c2c6cc8e2822f4d7ac12f6198f935798bf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd7564933d473a51ceb1289e19cf7ea17049e9cbc134796f287686797157bac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hrq4x\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.564650 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:46Z is after 
2025-08-24T17:21:41Z" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.580047 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.594326 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2f7940cc405b005b23f102def919753d7820ecc4e45db9f25cb87611611f4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:41:33Z\\\",\\\"message\\\":\\\"2025-10-03T08:40:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_49c5c875-7012-4f94-a45d-72a046fb0f24\\\\n2025-10-03T08:40:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_49c5c875-7012-4f94-a45d-72a046fb0f24 to /host/opt/cni/bin/\\\\n2025-10-03T08:40:48Z [verbose] multus-daemon started\\\\n2025-10-03T08:40:48Z [verbose] Readiness Indicator file check\\\\n2025-10-03T08:41:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:41:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.600542 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.600575 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.600587 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.600603 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.600614 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:46Z","lastTransitionTime":"2025-10-03T08:41:46Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.611616 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12e7158ae8abfde3e873066a70183f684e88d20db37ccd5a4e5251be59f1d27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-03T08:41:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.632047 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.648560 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.667109 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.687584 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.702810 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.702855 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.702865 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.702883 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.702906 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:46Z","lastTransitionTime":"2025-10-03T08:41:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.711661 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\
\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.727243 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ldv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27fd79a9-c016-46aa-8b67-446a831eb2d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:41:00Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-ldv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.738489 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9385ff69-ac63-4dac-b8ba-67bcfd9e9d41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2890c36988f29fbea57dfd712a1d66304622780d233996abe521a7a8bf0a932d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://306605d9d3b9065f7c546feb2478296ab23df5ee17f88e9732b78584614b2c3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54093b84153ff7894489025339a6977db0f46b4d2a3b49cc60077af28010d315\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"read
y\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b658507fbaf0ea4f98774e10f9bf7c34ad6dd9fffef18057d09c287f217091d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b658507fbaf0ea4f98774e10f9bf7c34ad6dd9fffef18057d09c287f217091d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.750627 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.762007 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.774206 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.791614 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a56dddb4754810c1205abc563d8c68f51b19c034
69eb832f543879d689844ea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a56dddb4754810c1205abc563d8c68f51b19c03469eb832f543879d689844ea4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:41:21Z\\\",\\\"message\\\":\\\"31 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1003 08:41:21.244108 6631 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z]\\\\nI1003 08:41:21.244093 6631 services_controller.go:444] Built service openshift-config-operator/metrics LB per-node configs for network=default: [\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:41:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-g7f7c_openshift-ovn-kubernetes(764a7341-6f52-4fc1-9086-87b90aa126e8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.803853 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:46Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.805171 4899 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.805202 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.805211 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.805227 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.805236 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:46Z","lastTransitionTime":"2025-10-03T08:41:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.910679 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.910723 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.910734 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.910749 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:46 crc kubenswrapper[4899]: I1003 08:41:46.910757 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:46Z","lastTransitionTime":"2025-10-03T08:41:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.013407 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.013448 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.013456 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.013471 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.013481 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:47Z","lastTransitionTime":"2025-10-03T08:41:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.116091 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.116133 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.116144 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.116160 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.116172 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:47Z","lastTransitionTime":"2025-10-03T08:41:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.218261 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.218299 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.218307 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.218324 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.218335 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:47Z","lastTransitionTime":"2025-10-03T08:41:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.320701 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.320749 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.320761 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.320778 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.320792 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:47Z","lastTransitionTime":"2025-10-03T08:41:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.423125 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.423175 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.423187 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.423205 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.423216 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:47Z","lastTransitionTime":"2025-10-03T08:41:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.529851 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.529966 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.530125 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.530135 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.530145 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.530155 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.530169 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.530178 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:47Z","lastTransitionTime":"2025-10-03T08:41:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:47 crc kubenswrapper[4899]: E1003 08:41:47.530198 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:47 crc kubenswrapper[4899]: E1003 08:41:47.530137 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:47 crc kubenswrapper[4899]: E1003 08:41:47.529991 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.530587 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:47 crc kubenswrapper[4899]: E1003 08:41:47.530651 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.530952 4899 scope.go:117] "RemoveContainer" containerID="a56dddb4754810c1205abc563d8c68f51b19c03469eb832f543879d689844ea4" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.632649 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.632721 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.632731 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.632744 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.632754 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:47Z","lastTransitionTime":"2025-10-03T08:41:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.734645 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.734681 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.734693 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.734708 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.734718 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:47Z","lastTransitionTime":"2025-10-03T08:41:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.837165 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.837212 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.837224 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.837240 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.837252 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:47Z","lastTransitionTime":"2025-10-03T08:41:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.930379 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f7c_764a7341-6f52-4fc1-9086-87b90aa126e8/ovnkube-controller/2.log" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.932712 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" event={"ID":"764a7341-6f52-4fc1-9086-87b90aa126e8","Type":"ContainerStarted","Data":"4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee"} Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.933131 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.938525 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.938550 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.938558 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.938570 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.938579 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:47Z","lastTransitionTime":"2025-10-03T08:41:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.943705 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:47Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.954680 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2f7940cc405b005b23f102def919753d7820ecc4e45db9f25cb87611611f4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:41:33Z\\\",\\\"message\\\":\\\"2025-10-03T08:40:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_49c5c875-7012-4f94-a45d-72a046fb0f24\\\\n2025-10-03T08:40:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_49c5c875-7012-4f94-a45d-72a046fb0f24 to /host/opt/cni/bin/\\\\n2025-10-03T08:40:48Z [verbose] multus-daemon started\\\\n2025-10-03T08:40:48Z [verbose] Readiness Indicator file check\\\\n2025-10-03T08:41:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:41:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:47Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.968313 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12e7158ae8abfde3e873066a70183f684e88d20db37ccd5a4e5251be59f1d27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:47Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.978671 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:47Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.989689 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:47Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:47 crc kubenswrapper[4899]: I1003 08:41:47.999045 4899 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-ldv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27fd79a9-c016-46aa-8b67-446a831eb2d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:41:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ldv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:47Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.010432 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9385ff69-ac63-4dac-b8ba-67bcfd9e9d41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2890c36988f29fbea57dfd712a1d66304622780d233996abe521a7a8bf0a932d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://306605d9d3b9065f7c546feb2478296ab23df5ee17f88e9732b78584614b2c3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54093b84153ff7894489025339a6977db0f46b4d2a3b49cc60077af28010d315\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b658507fbaf0ea4f98774e10f9bf7c34ad6dd9fffef18057d09c287f217091d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b658507fbaf0ea4f98774e10f9bf7c34ad6dd9fffef18057d09c287f217091d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.022129 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.038271 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.040627 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.040664 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.040675 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.040691 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.040702 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:48Z","lastTransitionTime":"2025-10-03T08:41:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.053462 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.072573 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a56dddb4754810c1205abc563d8c68f51b19c03469eb832f543879d689844ea4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:41:21Z\\\",\\\"message\\\":\\\"31 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1003 08:41:21.244108 6631 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z]\\\\nI1003 08:41:21.244093 6631 services_controller.go:444] Built service openshift-config-operator/metrics LB per-node configs for network=default: 
[\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:41:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.083685 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.095530 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.104573 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-03T08:41:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.117245 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.126810 4899 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.137340 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"59642124-309c-4d11-9965-c47f9b123e27\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab4f364e3ec7f70e2276e192d8d4c2c6cc8e2822f4d7ac12f6198f935798bf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd7564933d473a51ceb1289e19cf7ea17049e9cbc134796f287686797157bac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hrq4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:48Z is after 2025-08-24T17:21:41Z" Oct 03 
08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.143020 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.143057 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.143067 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.143083 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.143094 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:48Z","lastTransitionTime":"2025-10-03T08:41:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.244877 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.244937 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.244946 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.244961 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.244977 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:48Z","lastTransitionTime":"2025-10-03T08:41:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.346529 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.346560 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.346570 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.346590 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.346603 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:48Z","lastTransitionTime":"2025-10-03T08:41:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.448206 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.448265 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.448273 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.448288 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.448306 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:48Z","lastTransitionTime":"2025-10-03T08:41:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.550561 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.550602 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.550619 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.550642 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.550664 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:48Z","lastTransitionTime":"2025-10-03T08:41:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.653139 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.653178 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.653191 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.653209 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.653222 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:48Z","lastTransitionTime":"2025-10-03T08:41:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.755681 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.755991 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.756002 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.756015 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.756025 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:48Z","lastTransitionTime":"2025-10-03T08:41:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.858369 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.858426 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.858436 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.858449 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.858458 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:48Z","lastTransitionTime":"2025-10-03T08:41:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.937678 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f7c_764a7341-6f52-4fc1-9086-87b90aa126e8/ovnkube-controller/3.log" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.938278 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f7c_764a7341-6f52-4fc1-9086-87b90aa126e8/ovnkube-controller/2.log" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.940674 4899 generic.go:334] "Generic (PLEG): container finished" podID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerID="4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee" exitCode=1 Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.940728 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" event={"ID":"764a7341-6f52-4fc1-9086-87b90aa126e8","Type":"ContainerDied","Data":"4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee"} Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.940812 4899 scope.go:117] "RemoveContainer" containerID="a56dddb4754810c1205abc563d8c68f51b19c03469eb832f543879d689844ea4" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.941376 4899 scope.go:117] "RemoveContainer" containerID="4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee" Oct 03 08:41:48 crc kubenswrapper[4899]: E1003 08:41:48.941544 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-g7f7c_openshift-ovn-kubernetes(764a7341-6f52-4fc1-9086-87b90aa126e8)\"" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.955812 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.960739 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.960774 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.960783 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.960797 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.960806 4899 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:48Z","lastTransitionTime":"2025-10-03T08:41:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.967208 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.979080 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:48 crc kubenswrapper[4899]: I1003 08:41:48.996869 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4902f94e75c5598a2e34e736116f11cd1a207d74
97c9e340a39f9e913b8ce1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a56dddb4754810c1205abc563d8c68f51b19c03469eb832f543879d689844ea4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:41:21Z\\\",\\\"message\\\":\\\"31 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1003 08:41:21.244108 6631 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:21Z is after 2025-08-24T17:21:41Z]\\\\nI1003 08:41:21.244093 6631 services_controller.go:444] Built service openshift-config-operator/metrics LB per-node configs for network=default: [\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:41:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:41:48Z\\\",\\\"message\\\":\\\"rvices.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/certified-operators_TCP_cluster\\\\\\\", UUID:\\\\\\\"20da2226-531c-4179-9810-aa4026995ca3\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/certified-operators\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/certified-operators_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/certified-operators\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, 
AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.214\\\\\\\", Port:50051, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1003 08:41:48.251140 7043 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\
\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:48Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.008250 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.017481 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.028237 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"59642124-309c-4d11-9965-c47f9b123e27\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab4f364e3ec7f70e2276e192d8d4c2c6cc8e2822f4d7ac12f6198f935798bf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd7564933d473a51ceb1289e19cf7ea17049e9cbc134796f287686797157bac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hrq4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:49Z is after 2025-08-24T17:21:41Z" Oct 03 
08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.037880 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.049631 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2f7940cc405b005b23f102def919753d7820ecc4e45db9f25cb87611611f4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:41:33Z\\\",\\\"message\\\":\\\"2025-10-03T08:40:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_49c5c875-7012-4f94-a45d-72a046fb0f24\\\\n2025-10-03T08:40:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_49c5c875-7012-4f94-a45d-72a046fb0f24 to /host/opt/cni/bin/\\\\n2025-10-03T08:40:48Z [verbose] multus-daemon started\\\\n2025-10-03T08:40:48Z [verbose] Readiness Indicator file check\\\\n2025-10-03T08:41:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:41:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.062659 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.062704 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.062713 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.062727 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.062739 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:49Z","lastTransitionTime":"2025-10-03T08:41:49Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.062725 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12e7158ae8abfde3e873066a70183f684e88d20db37ccd5a4e5251be59f1d27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-03T08:41:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.071688 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ldv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27fd79a9-c016-46aa-8b67-446a831eb2d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:41:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ldv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.081526 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9385ff69-ac63-4dac-b8ba-67bcfd9e9d41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2890c36988f29fbea57dfd712a1d66304622780d233996abe521a7a8bf0a932d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://306605d9d3b9065f7c546feb2478296ab23df5ee17f88e9732b78584614b2c3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54093b84153ff7894489025339a6977db0f46b4d2a3b49cc60077af28010d315\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b658507fbaf0ea4f98774e10f9bf7c34ad6dd9fffef18057d09c287f217091d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b658507fbaf0ea4f98774e10f9bf7c34ad6dd9fffef18057d09c287f217091d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.091861 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.104284 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.115311 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.124017 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.133385 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.165066 4899 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.165095 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.165104 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.165117 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.165127 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:49Z","lastTransitionTime":"2025-10-03T08:41:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.267270 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.267308 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.267317 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.267331 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.267340 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:49Z","lastTransitionTime":"2025-10-03T08:41:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.298749 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.298807 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.298837 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:49 crc kubenswrapper[4899]: E1003 08:41:49.298974 4899 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 08:41:49 crc kubenswrapper[4899]: E1003 08:41:49.298994 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 08:41:49 crc kubenswrapper[4899]: E1003 08:41:49.299017 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 08:41:49 crc kubenswrapper[4899]: E1003 08:41:49.299029 4899 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:41:49 crc kubenswrapper[4899]: E1003 08:41:49.298979 4899 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 08:41:49 crc kubenswrapper[4899]: E1003 08:41:49.299054 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 08:42:53.299035347 +0000 UTC m=+147.406520380 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 08:41:49 crc kubenswrapper[4899]: E1003 08:41:49.299092 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 08:42:53.299082918 +0000 UTC m=+147.406567951 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:41:49 crc kubenswrapper[4899]: E1003 08:41:49.299104 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 08:42:53.299098089 +0000 UTC m=+147.406583042 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.369058 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.369109 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.369118 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.369130 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.369140 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:49Z","lastTransitionTime":"2025-10-03T08:41:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.399581 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:41:49 crc kubenswrapper[4899]: E1003 08:41:49.399876 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:53.39984995 +0000 UTC m=+147.507334903 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.400010 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:49 crc kubenswrapper[4899]: E1003 08:41:49.400131 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 08:41:49 crc kubenswrapper[4899]: E1003 08:41:49.400148 4899 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 08:41:49 crc kubenswrapper[4899]: E1003 08:41:49.400158 4899 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:41:49 crc kubenswrapper[4899]: E1003 08:41:49.400190 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 08:42:53.40018229 +0000 UTC m=+147.507667243 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.471701 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.471752 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.471772 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.471789 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.471799 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:49Z","lastTransitionTime":"2025-10-03T08:41:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.526194 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.526226 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.526242 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:49 crc kubenswrapper[4899]: E1003 08:41:49.526323 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.526335 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:49 crc kubenswrapper[4899]: E1003 08:41:49.526451 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:49 crc kubenswrapper[4899]: E1003 08:41:49.526502 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:49 crc kubenswrapper[4899]: E1003 08:41:49.526687 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.574744 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.574809 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.574821 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.574844 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.574857 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:49Z","lastTransitionTime":"2025-10-03T08:41:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.677767 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.677813 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.677824 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.677839 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.677849 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:49Z","lastTransitionTime":"2025-10-03T08:41:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.781184 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.781272 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.781283 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.781304 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.781315 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:49Z","lastTransitionTime":"2025-10-03T08:41:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.884604 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.884655 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.884668 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.884686 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.884698 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:49Z","lastTransitionTime":"2025-10-03T08:41:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.946498 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f7c_764a7341-6f52-4fc1-9086-87b90aa126e8/ovnkube-controller/3.log" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.952372 4899 scope.go:117] "RemoveContainer" containerID="4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee" Oct 03 08:41:49 crc kubenswrapper[4899]: E1003 08:41:49.952786 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-g7f7c_openshift-ovn-kubernetes(764a7341-6f52-4fc1-9086-87b90aa126e8)\"" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.970815 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identi
ty-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.983565 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.988755 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 
08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.988823 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.988838 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.988865 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.988882 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:49Z","lastTransitionTime":"2025-10-03T08:41:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:49 crc kubenswrapper[4899]: I1003 08:41:49.997436 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59642124-309c-4d11-9965-c47f9b123e27\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab4f364e3ec7f70e2276e192d8d4c2c6cc8e2822f4d7ac12f6198f935798bf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd7564933d473a51ceb1289e19cf7ea17049e9cbc134796f287686797157bac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState
\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hrq4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:49Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.010302 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:50Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.023564 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2f7940cc405b005b23f102def919753d7820ecc4e45db9f25cb87611611f4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:41:33Z\\\",\\\"message\\\":\\\"2025-10-03T08:40:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_49c5c875-7012-4f94-a45d-72a046fb0f24\\\\n2025-10-03T08:40:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_49c5c875-7012-4f94-a45d-72a046fb0f24 to /host/opt/cni/bin/\\\\n2025-10-03T08:40:48Z [verbose] multus-daemon started\\\\n2025-10-03T08:40:48Z [verbose] Readiness Indicator file check\\\\n2025-10-03T08:41:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:41:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:50Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.037923 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12e7158ae8abfde3e873066a70183f684e88d20db37ccd5a4e5251be59f1d27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:50Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.048464 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ldv5d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27fd79a9-c016-46aa-8b67-446a831eb2d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:41:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ldv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:50Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.063160 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9385ff69-ac63-4dac-b8ba-67bcfd9e9d41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2890c36988f29fbea57dfd712a1d66304622780d233996abe521a7a8bf0a932d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://306605d9d3b9065f7c546feb2478296ab23df5ee17f88e9732b78584614b2c3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54093b84153ff7894489025339a6977db0f46b4d2a3b49cc60077af28010d315\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b658507fbaf0ea4f98774e10f9bf7c34ad6dd9fffef18057d09c287f217091d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b658507fbaf0ea4f98774e10f9bf7c34ad6dd9fffef18057d09c287f217091d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:50Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.078332 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:50Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.091399 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.091463 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.091477 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.091495 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.091525 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:50Z","lastTransitionTime":"2025-10-03T08:41:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.093213 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:50Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.108523 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:50Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.121751 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:50Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.135188 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:50Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.150686 4899 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d2
3696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:50Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.166687 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:50Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.182088 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-03T08:41:50Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.194983 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.195038 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.195049 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.195071 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.195084 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:50Z","lastTransitionTime":"2025-10-03T08:41:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.206844 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4902f94e75c5598a2e34e736116f11cd1a207d74
97c9e340a39f9e913b8ce1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:41:48Z\\\",\\\"message\\\":\\\"rvices.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/certified-operators_TCP_cluster\\\\\\\", UUID:\\\\\\\"20da2226-531c-4179-9810-aa4026995ca3\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/certified-operators\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/certified-operators_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/certified-operators\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.214\\\\\\\", Port:50051, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1003 08:41:48.251140 7043 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:41:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-g7f7c_openshift-ovn-kubernetes(764a7341-6f52-4fc1-9086-87b90aa126e8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:50Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.297547 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.297609 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.297623 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.297640 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.297651 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:50Z","lastTransitionTime":"2025-10-03T08:41:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.401854 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.402001 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.402025 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.402060 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.402082 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:50Z","lastTransitionTime":"2025-10-03T08:41:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.505807 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.505852 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.505862 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.505881 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.505905 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:50Z","lastTransitionTime":"2025-10-03T08:41:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.608961 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.609019 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.609035 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.609062 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.609078 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:50Z","lastTransitionTime":"2025-10-03T08:41:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.712419 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.712823 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.713050 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.713174 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.713315 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:50Z","lastTransitionTime":"2025-10-03T08:41:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.815350 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.815390 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.815401 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.815414 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.815423 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:50Z","lastTransitionTime":"2025-10-03T08:41:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.916855 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.916928 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.916939 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.917054 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:50 crc kubenswrapper[4899]: I1003 08:41:50.917067 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:50Z","lastTransitionTime":"2025-10-03T08:41:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.019193 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.019235 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.019245 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.019262 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.019274 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:51Z","lastTransitionTime":"2025-10-03T08:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.122047 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.122086 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.122094 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.122107 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.122117 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:51Z","lastTransitionTime":"2025-10-03T08:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.225176 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.225260 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.225276 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.225308 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.225324 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:51Z","lastTransitionTime":"2025-10-03T08:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.327751 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.327811 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.327821 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.327843 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.327869 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:51Z","lastTransitionTime":"2025-10-03T08:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.431142 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.431202 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.431215 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.431235 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.431244 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:51Z","lastTransitionTime":"2025-10-03T08:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.526940 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.527049 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.527119 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:51 crc kubenswrapper[4899]: E1003 08:41:51.527205 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.527265 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:51 crc kubenswrapper[4899]: E1003 08:41:51.527389 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:51 crc kubenswrapper[4899]: E1003 08:41:51.527519 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:51 crc kubenswrapper[4899]: E1003 08:41:51.527852 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.535113 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.535151 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.535162 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.535196 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.535206 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:51Z","lastTransitionTime":"2025-10-03T08:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.542955 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.638452 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.638509 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.638523 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.638548 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.638565 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:51Z","lastTransitionTime":"2025-10-03T08:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.741136 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.741201 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.741242 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.741259 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.741276 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:51Z","lastTransitionTime":"2025-10-03T08:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.844075 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.844118 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.844126 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.844141 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.844152 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:51Z","lastTransitionTime":"2025-10-03T08:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.946880 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.946955 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.946969 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.946990 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:51 crc kubenswrapper[4899]: I1003 08:41:51.947006 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:51Z","lastTransitionTime":"2025-10-03T08:41:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.050106 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.050143 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.050154 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.050187 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.050199 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:52Z","lastTransitionTime":"2025-10-03T08:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.152451 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.152507 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.152542 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.152565 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.152581 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:52Z","lastTransitionTime":"2025-10-03T08:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.255369 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.255447 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.255472 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.255969 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.256013 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:52Z","lastTransitionTime":"2025-10-03T08:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.359086 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.359132 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.359173 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.359189 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.359199 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:52Z","lastTransitionTime":"2025-10-03T08:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.462667 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.462747 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.462767 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.462801 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.462825 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:52Z","lastTransitionTime":"2025-10-03T08:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.566004 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.566062 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.566072 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.566089 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.566100 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:52Z","lastTransitionTime":"2025-10-03T08:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.670213 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.670279 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.670298 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.670325 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.670341 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:52Z","lastTransitionTime":"2025-10-03T08:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.774072 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.774143 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.774156 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.774182 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.774196 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:52Z","lastTransitionTime":"2025-10-03T08:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.877584 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.877639 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.877655 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.877678 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.877692 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:52Z","lastTransitionTime":"2025-10-03T08:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.980506 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.980558 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.980573 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.980597 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:52 crc kubenswrapper[4899]: I1003 08:41:52.980610 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:52Z","lastTransitionTime":"2025-10-03T08:41:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.084088 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.084152 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.084171 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.084199 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.084215 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:53Z","lastTransitionTime":"2025-10-03T08:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.187707 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.187771 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.187785 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.187806 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.187819 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:53Z","lastTransitionTime":"2025-10-03T08:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.290497 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.290582 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.290595 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.290620 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.290637 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:53Z","lastTransitionTime":"2025-10-03T08:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.394363 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.394437 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.394455 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.394483 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.394501 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:53Z","lastTransitionTime":"2025-10-03T08:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.476529 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.476592 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.476610 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.476634 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.476649 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:53Z","lastTransitionTime":"2025-10-03T08:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:53 crc kubenswrapper[4899]: E1003 08:41:53.493129 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:53Z is after 
2025-08-24T17:21:41Z" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.498303 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.498352 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.498363 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.498383 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.498393 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:53Z","lastTransitionTime":"2025-10-03T08:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:53 crc kubenswrapper[4899]: E1003 08:41:53.512657 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:53Z is after 
2025-08-24T17:21:41Z" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.518334 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.518419 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.518441 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.518467 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.518485 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:53Z","lastTransitionTime":"2025-10-03T08:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.526583 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.526621 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.526583 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.526583 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:53 crc kubenswrapper[4899]: E1003 08:41:53.526749 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:53 crc kubenswrapper[4899]: E1003 08:41:53.526860 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:53 crc kubenswrapper[4899]: E1003 08:41:53.527000 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:53 crc kubenswrapper[4899]: E1003 08:41:53.527111 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:53 crc kubenswrapper[4899]: E1003 08:41:53.538984 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:53Z is after 
2025-08-24T17:21:41Z" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.544950 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.544991 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.545006 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.545020 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.545034 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:53Z","lastTransitionTime":"2025-10-03T08:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:53 crc kubenswrapper[4899]: E1003 08:41:53.559639 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:53Z is after 
2025-08-24T17:21:41Z" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.564229 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.564273 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.564287 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.564308 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.564321 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:53Z","lastTransitionTime":"2025-10-03T08:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:53 crc kubenswrapper[4899]: E1003 08:41:53.577168 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:53Z is after 
2025-08-24T17:21:41Z" Oct 03 08:41:53 crc kubenswrapper[4899]: E1003 08:41:53.577290 4899 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.579420 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.579454 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.579465 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.579539 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.579552 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:53Z","lastTransitionTime":"2025-10-03T08:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.683245 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.683290 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.683304 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.683332 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.683343 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:53Z","lastTransitionTime":"2025-10-03T08:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.786031 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.786071 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.786081 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.786098 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.786109 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:53Z","lastTransitionTime":"2025-10-03T08:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.888974 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.889030 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.889050 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.889089 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.889108 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:53Z","lastTransitionTime":"2025-10-03T08:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.992228 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.992328 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.992354 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.992394 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:53 crc kubenswrapper[4899]: I1003 08:41:53.992419 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:53Z","lastTransitionTime":"2025-10-03T08:41:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.094207 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.094255 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.094266 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.094281 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.094292 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:54Z","lastTransitionTime":"2025-10-03T08:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.196936 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.196961 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.196969 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.196983 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.196992 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:54Z","lastTransitionTime":"2025-10-03T08:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.299875 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.299973 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.299996 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.300026 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.300046 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:54Z","lastTransitionTime":"2025-10-03T08:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.403235 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.403282 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.403294 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.403312 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.403323 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:54Z","lastTransitionTime":"2025-10-03T08:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.506114 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.506163 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.506197 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.506217 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.506228 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:54Z","lastTransitionTime":"2025-10-03T08:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.608298 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.608601 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.608619 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.608635 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.608644 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:54Z","lastTransitionTime":"2025-10-03T08:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.711463 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.711508 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.711517 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.711849 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.711870 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:54Z","lastTransitionTime":"2025-10-03T08:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.814680 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.814733 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.814752 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.814772 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.814785 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:54Z","lastTransitionTime":"2025-10-03T08:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.917423 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.917463 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.917474 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.917491 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:54 crc kubenswrapper[4899]: I1003 08:41:54.917505 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:54Z","lastTransitionTime":"2025-10-03T08:41:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.019593 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.019626 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.019639 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.019654 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.019664 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:55Z","lastTransitionTime":"2025-10-03T08:41:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.121788 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.121824 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.121837 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.121854 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.121867 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:55Z","lastTransitionTime":"2025-10-03T08:41:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.223978 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.224018 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.224030 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.224046 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.224055 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:55Z","lastTransitionTime":"2025-10-03T08:41:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.326329 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.326364 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.326373 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.326407 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.326418 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:55Z","lastTransitionTime":"2025-10-03T08:41:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.429118 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.429158 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.429169 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.429184 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.429192 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:55Z","lastTransitionTime":"2025-10-03T08:41:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.526174 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.526252 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.526291 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.526403 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:55 crc kubenswrapper[4899]: E1003 08:41:55.526403 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:55 crc kubenswrapper[4899]: E1003 08:41:55.526460 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:55 crc kubenswrapper[4899]: E1003 08:41:55.526510 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:55 crc kubenswrapper[4899]: E1003 08:41:55.526615 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.531677 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.531703 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.531711 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.531722 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.531731 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:55Z","lastTransitionTime":"2025-10-03T08:41:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.634220 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.634264 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.634274 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.634290 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.634301 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:55Z","lastTransitionTime":"2025-10-03T08:41:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.736571 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.736606 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.736616 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.736631 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.736640 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:55Z","lastTransitionTime":"2025-10-03T08:41:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.838608 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.838660 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.838673 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.838689 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.838700 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:55Z","lastTransitionTime":"2025-10-03T08:41:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.941409 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.941454 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.941462 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.941476 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:55 crc kubenswrapper[4899]: I1003 08:41:55.941486 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:55Z","lastTransitionTime":"2025-10-03T08:41:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.043567 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.043647 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.043681 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.043708 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.043739 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:56Z","lastTransitionTime":"2025-10-03T08:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.145938 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.145987 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.145997 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.146012 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.146024 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:56Z","lastTransitionTime":"2025-10-03T08:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.247665 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.247727 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.247739 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.247758 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.247770 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:56Z","lastTransitionTime":"2025-10-03T08:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.350540 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.350599 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.350609 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.350631 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.350643 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:56Z","lastTransitionTime":"2025-10-03T08:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.453281 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.453357 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.453373 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.453391 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.453404 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:56Z","lastTransitionTime":"2025-10-03T08:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.538725 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ldv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27fd79a9-c016-46aa-8b67-446a831eb2d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:41:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ldv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.540644 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.552388 4899 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9385ff69-ac63-4dac-b8ba-67bcfd9e9d41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2890c36988f29fbea57dfd712a1d66304622780d233996abe521a7a8bf0a932d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://306605d9d3b9065f7c546feb2478296ab23df5ee17f88e9732b78584614b2c3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54093b84153ff7894489025339a6977db0f46b4d2a3b49cc60077af28010d315\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://9b658507fbaf0ea4f98774e10f9bf7c34ad6dd9fffef18057d09c287f217091d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b658507fbaf0ea4f98774e10f9bf7c34ad6dd9fffef18057d09c287f217091d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.556605 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.556664 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.556674 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.556697 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.556709 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:56Z","lastTransitionTime":"2025-10-03T08:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.568430 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.580786 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.594427 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.607706 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.623066 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.636551 4899 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d2
3696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.649568 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.659040 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.659098 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.659111 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.659134 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.659148 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:56Z","lastTransitionTime":"2025-10-03T08:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.661748 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.682053 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:41:48Z\\\",\\\"message\\\":\\\"rvices.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/certified-operators_TCP_cluster\\\\\\\", UUID:\\\\\\\"20da2226-531c-4179-9810-aa4026995ca3\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/certified-operators\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/certified-operators_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/certified-operators\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.214\\\\\\\", Port:50051, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1003 08:41:48.251140 7043 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:41:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-g7f7c_openshift-ovn-kubernetes(764a7341-6f52-4fc1-9086-87b90aa126e8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.695399 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.705676 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03
T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.716330 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59642124-309c-4d11-9965-c47f9b123e27\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab4f364e3ec7f70e2276e192d8d4c2c6cc8e2822f4d7ac12f6198f935798bf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd7564933d473a51ceb1289e19cf7ea17049e9cbc134796f287686797157bac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[
{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hrq4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.727263 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb66b7b-06a4-4abf-844a-9dde8b2050e7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f3d24871e87681e41b4247d18fbe1f63d9f6c5d9b2d4e70062453716a55026f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba16d72a68e73e000a876907e9dea4e5ad713c3bf0b4b6478d4fde2efedda458\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba16d72a68e73e000a876907e9dea4e5ad713c3bf0b4b6478d4fde2efedda458\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"
podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.739845 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.752247 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2f7940cc405b005b23f102def919753d7820ecc4e45db9f25cb87611611f4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:41:33Z\\\",\\\"message\\\":\\\"2025-10-03T08:40:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_49c5c875-7012-4f94-a45d-72a046fb0f24\\\\n2025-10-03T08:40:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_49c5c875-7012-4f94-a45d-72a046fb0f24 to /host/opt/cni/bin/\\\\n2025-10-03T08:40:48Z [verbose] multus-daemon started\\\\n2025-10-03T08:40:48Z [verbose] Readiness Indicator file check\\\\n2025-10-03T08:41:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:41:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:41:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.761436 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.761478 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.761490 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.761506 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.761518 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:56Z","lastTransitionTime":"2025-10-03T08:41:56Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.767237 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12e7158ae8abfde3e873066a70183f684e88d20db37ccd5a4e5251be59f1d27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-03T08:41:56Z is after 2025-08-24T17:21:41Z" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.864093 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.864127 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.864135 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.864148 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.864157 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:56Z","lastTransitionTime":"2025-10-03T08:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.966349 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.966528 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.966544 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.966560 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:56 crc kubenswrapper[4899]: I1003 08:41:56.966570 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:56Z","lastTransitionTime":"2025-10-03T08:41:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.069261 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.069295 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.069303 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.069317 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.069327 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:57Z","lastTransitionTime":"2025-10-03T08:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.173475 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.173535 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.173549 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.173570 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.173587 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:57Z","lastTransitionTime":"2025-10-03T08:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.275986 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.276031 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.276040 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.276054 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.276062 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:57Z","lastTransitionTime":"2025-10-03T08:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.378279 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.378321 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.378330 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.378344 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.378357 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:57Z","lastTransitionTime":"2025-10-03T08:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.481047 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.481088 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.481099 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.481115 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.481124 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:57Z","lastTransitionTime":"2025-10-03T08:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.526613 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:57 crc kubenswrapper[4899]: E1003 08:41:57.526768 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.526808 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.526621 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:57 crc kubenswrapper[4899]: E1003 08:41:57.526949 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.526972 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:57 crc kubenswrapper[4899]: E1003 08:41:57.527081 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:57 crc kubenswrapper[4899]: E1003 08:41:57.527143 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.583289 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.583321 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.583329 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.583343 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.583353 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:57Z","lastTransitionTime":"2025-10-03T08:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.685770 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.685846 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.685863 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.685880 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.685918 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:57Z","lastTransitionTime":"2025-10-03T08:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.787845 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.787913 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.787925 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.787944 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.787955 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:57Z","lastTransitionTime":"2025-10-03T08:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.889931 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.889966 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.889974 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.889988 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.889997 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:57Z","lastTransitionTime":"2025-10-03T08:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.992005 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.992045 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.992055 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.992071 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:57 crc kubenswrapper[4899]: I1003 08:41:57.992082 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:57Z","lastTransitionTime":"2025-10-03T08:41:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.093665 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.093699 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.093711 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.093726 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.093736 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:58Z","lastTransitionTime":"2025-10-03T08:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.196040 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.196083 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.196095 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.196109 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.196121 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:58Z","lastTransitionTime":"2025-10-03T08:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.298940 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.298975 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.298982 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.298995 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.299003 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:58Z","lastTransitionTime":"2025-10-03T08:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.401161 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.401196 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.401207 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.401222 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.401232 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:58Z","lastTransitionTime":"2025-10-03T08:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.503933 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.503976 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.503985 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.504001 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.504013 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:58Z","lastTransitionTime":"2025-10-03T08:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.606003 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.606050 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.606059 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.606076 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.606086 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:58Z","lastTransitionTime":"2025-10-03T08:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.708659 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.708701 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.708717 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.708745 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.708765 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:58Z","lastTransitionTime":"2025-10-03T08:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.810202 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.810274 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.810287 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.810303 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.810313 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:58Z","lastTransitionTime":"2025-10-03T08:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.912971 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.913019 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.913030 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.913048 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:58 crc kubenswrapper[4899]: I1003 08:41:58.913060 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:58Z","lastTransitionTime":"2025-10-03T08:41:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.015132 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.015171 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.015181 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.015196 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.015207 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:59Z","lastTransitionTime":"2025-10-03T08:41:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.117846 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.117948 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.117985 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.118007 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.118016 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:59Z","lastTransitionTime":"2025-10-03T08:41:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.220519 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.220548 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.220558 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.220571 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.220583 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:59Z","lastTransitionTime":"2025-10-03T08:41:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.323607 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.323645 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.323653 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.323665 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.323673 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:59Z","lastTransitionTime":"2025-10-03T08:41:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.427571 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.427616 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.427628 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.427643 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.427660 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:59Z","lastTransitionTime":"2025-10-03T08:41:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.526084 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.526205 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:41:59 crc kubenswrapper[4899]: E1003 08:41:59.526255 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.526284 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:41:59 crc kubenswrapper[4899]: E1003 08:41:59.526386 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.526443 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:41:59 crc kubenswrapper[4899]: E1003 08:41:59.526506 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:41:59 crc kubenswrapper[4899]: E1003 08:41:59.526562 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.529881 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.529957 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.529968 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.530025 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.530040 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:59Z","lastTransitionTime":"2025-10-03T08:41:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.633126 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.633178 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.633187 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.633203 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.633212 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:59Z","lastTransitionTime":"2025-10-03T08:41:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.735772 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.735813 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.735821 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.735835 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.735845 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:59Z","lastTransitionTime":"2025-10-03T08:41:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.838158 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.838261 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.838286 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.838303 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.838314 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:59Z","lastTransitionTime":"2025-10-03T08:41:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.940604 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.940642 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.940652 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.940666 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:41:59 crc kubenswrapper[4899]: I1003 08:41:59.940675 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:41:59Z","lastTransitionTime":"2025-10-03T08:41:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.042683 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.042721 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.042754 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.042771 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.042782 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:00Z","lastTransitionTime":"2025-10-03T08:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.144976 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.145015 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.145027 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.145044 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.145055 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:00Z","lastTransitionTime":"2025-10-03T08:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.247602 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.247642 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.247653 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.247667 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.247677 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:00Z","lastTransitionTime":"2025-10-03T08:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.350170 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.350221 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.350240 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.350262 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.350278 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:00Z","lastTransitionTime":"2025-10-03T08:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.453076 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.453160 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.453185 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.453212 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.453232 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:00Z","lastTransitionTime":"2025-10-03T08:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.555934 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.556070 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.556084 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.556098 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.556108 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:00Z","lastTransitionTime":"2025-10-03T08:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.658859 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.658922 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.658935 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.658951 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.658961 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:00Z","lastTransitionTime":"2025-10-03T08:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.761475 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.761541 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.761552 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.761566 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.761574 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:00Z","lastTransitionTime":"2025-10-03T08:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.864125 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.864171 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.864184 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.864201 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.864211 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:00Z","lastTransitionTime":"2025-10-03T08:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.966712 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.966783 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.966796 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.966813 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:00 crc kubenswrapper[4899]: I1003 08:42:00.966825 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:00Z","lastTransitionTime":"2025-10-03T08:42:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.069340 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.069595 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.069604 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.069618 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.069627 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:01Z","lastTransitionTime":"2025-10-03T08:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.172084 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.172180 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.172194 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.172215 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.172228 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:01Z","lastTransitionTime":"2025-10-03T08:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.274707 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.274764 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.274776 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.274791 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.274802 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:01Z","lastTransitionTime":"2025-10-03T08:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.377269 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.377310 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.377321 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.377335 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.377344 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:01Z","lastTransitionTime":"2025-10-03T08:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.479932 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.479970 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.479980 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.479994 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.480005 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:01Z","lastTransitionTime":"2025-10-03T08:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.526419 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.526480 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.526493 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:42:01 crc kubenswrapper[4899]: E1003 08:42:01.526546 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:42:01 crc kubenswrapper[4899]: E1003 08:42:01.526640 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.526662 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:42:01 crc kubenswrapper[4899]: E1003 08:42:01.526720 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:42:01 crc kubenswrapper[4899]: E1003 08:42:01.526797 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.582655 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.582702 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.582713 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.582730 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.582742 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:01Z","lastTransitionTime":"2025-10-03T08:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.685050 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.685097 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.685106 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.685121 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.685130 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:01Z","lastTransitionTime":"2025-10-03T08:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.787913 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.787958 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.787967 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.787982 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.788000 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:01Z","lastTransitionTime":"2025-10-03T08:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.890332 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.890366 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.890375 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.890388 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.890397 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:01Z","lastTransitionTime":"2025-10-03T08:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.992749 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.992810 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.993075 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.993092 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:01 crc kubenswrapper[4899]: I1003 08:42:01.993103 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:01Z","lastTransitionTime":"2025-10-03T08:42:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.095530 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.095596 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.095606 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.095620 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.095647 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:02Z","lastTransitionTime":"2025-10-03T08:42:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.197566 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.197603 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.197610 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.197624 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.197634 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:02Z","lastTransitionTime":"2025-10-03T08:42:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.299786 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.299846 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.299863 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.299886 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.299955 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:02Z","lastTransitionTime":"2025-10-03T08:42:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.401927 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.401974 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.402000 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.402016 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.402025 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:02Z","lastTransitionTime":"2025-10-03T08:42:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.504778 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.504816 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.504825 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.504839 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.504848 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:02Z","lastTransitionTime":"2025-10-03T08:42:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.606778 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.606816 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.606827 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.606844 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.606854 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:02Z","lastTransitionTime":"2025-10-03T08:42:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.709573 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.709609 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.709618 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.709633 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.709643 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:02Z","lastTransitionTime":"2025-10-03T08:42:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.811931 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.811982 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.811991 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.812009 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.812019 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:02Z","lastTransitionTime":"2025-10-03T08:42:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.914909 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.914962 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.914975 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.914988 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:02 crc kubenswrapper[4899]: I1003 08:42:02.914997 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:02Z","lastTransitionTime":"2025-10-03T08:42:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.017551 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.017599 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.017616 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.017636 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.017648 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:03Z","lastTransitionTime":"2025-10-03T08:42:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.119612 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.119641 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.119649 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.119661 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.119670 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:03Z","lastTransitionTime":"2025-10-03T08:42:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.221286 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.221334 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.221342 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.221357 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.221368 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:03Z","lastTransitionTime":"2025-10-03T08:42:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.323613 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.323649 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.323658 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.323673 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.323684 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:03Z","lastTransitionTime":"2025-10-03T08:42:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.426066 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.426148 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.426163 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.426178 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.426189 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:03Z","lastTransitionTime":"2025-10-03T08:42:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.526336 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.526363 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.526363 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.526387 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:42:03 crc kubenswrapper[4899]: E1003 08:42:03.526485 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:42:03 crc kubenswrapper[4899]: E1003 08:42:03.526602 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:42:03 crc kubenswrapper[4899]: E1003 08:42:03.526673 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:42:03 crc kubenswrapper[4899]: E1003 08:42:03.526712 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.527987 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.528019 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.528031 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.528048 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.528059 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:03Z","lastTransitionTime":"2025-10-03T08:42:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.607187 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.607222 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.607231 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.607244 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.607253 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:03Z","lastTransitionTime":"2025-10-03T08:42:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:03 crc kubenswrapper[4899]: E1003 08:42:03.618842 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:42:03Z is after 2025-08-24T17:21:41Z" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.623071 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.623103 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.623114 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.623129 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.623137 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:03Z","lastTransitionTime":"2025-10-03T08:42:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:03 crc kubenswrapper[4899]: E1003 08:42:03.635472 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:42:03Z is after 2025-08-24T17:21:41Z" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.639277 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.639323 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.639337 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.639357 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.639370 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:03Z","lastTransitionTime":"2025-10-03T08:42:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:03 crc kubenswrapper[4899]: E1003 08:42:03.653688 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:42:03Z is after 2025-08-24T17:21:41Z" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.657110 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.657158 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.657168 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.657184 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.657194 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:03Z","lastTransitionTime":"2025-10-03T08:42:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:03 crc kubenswrapper[4899]: E1003 08:42:03.668105 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:42:03Z is after 2025-08-24T17:21:41Z" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.671309 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.671356 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.671365 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.671377 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.671386 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:03Z","lastTransitionTime":"2025-10-03T08:42:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:03 crc kubenswrapper[4899]: E1003 08:42:03.686934 4899 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T08:42:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"557489d0-c981-4ed2-aac9-e59d234411ae\\\",\\\"systemUUID\\\":\\\"bb3940af-b89c-4a7a-b7ee-d19044192ef2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:42:03Z is after 2025-08-24T17:21:41Z" Oct 03 08:42:03 crc kubenswrapper[4899]: E1003 08:42:03.687088 4899 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.688736 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.688777 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.688787 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.688804 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.688816 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:03Z","lastTransitionTime":"2025-10-03T08:42:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.790920 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.791070 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.791084 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.791101 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.791110 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:03Z","lastTransitionTime":"2025-10-03T08:42:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.893671 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.893713 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.893724 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.893741 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.893751 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:03Z","lastTransitionTime":"2025-10-03T08:42:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.995936 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.996159 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.996222 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.996331 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:03 crc kubenswrapper[4899]: I1003 08:42:03.996423 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:03Z","lastTransitionTime":"2025-10-03T08:42:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.056591 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/27fd79a9-c016-46aa-8b67-446a831eb2d8-metrics-certs\") pod \"network-metrics-daemon-ldv5d\" (UID: \"27fd79a9-c016-46aa-8b67-446a831eb2d8\") " pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:42:04 crc kubenswrapper[4899]: E1003 08:42:04.056700 4899 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 08:42:04 crc kubenswrapper[4899]: E1003 08:42:04.056999 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/27fd79a9-c016-46aa-8b67-446a831eb2d8-metrics-certs podName:27fd79a9-c016-46aa-8b67-446a831eb2d8 nodeName:}" failed. No retries permitted until 2025-10-03 08:43:08.056983973 +0000 UTC m=+162.164468926 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/27fd79a9-c016-46aa-8b67-446a831eb2d8-metrics-certs") pod "network-metrics-daemon-ldv5d" (UID: "27fd79a9-c016-46aa-8b67-446a831eb2d8") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.098497 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.098535 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.098547 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.098562 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.098573 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:04Z","lastTransitionTime":"2025-10-03T08:42:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.202001 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.202065 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.202077 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.202100 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.202115 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:04Z","lastTransitionTime":"2025-10-03T08:42:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.303992 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.304034 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.304045 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.304062 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.304079 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:04Z","lastTransitionTime":"2025-10-03T08:42:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.406625 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.406665 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.406674 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.406687 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.406698 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:04Z","lastTransitionTime":"2025-10-03T08:42:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.509521 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.509592 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.509605 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.509618 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.509627 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:04Z","lastTransitionTime":"2025-10-03T08:42:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.527072 4899 scope.go:117] "RemoveContainer" containerID="4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee" Oct 03 08:42:04 crc kubenswrapper[4899]: E1003 08:42:04.527282 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-g7f7c_openshift-ovn-kubernetes(764a7341-6f52-4fc1-9086-87b90aa126e8)\"" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.612363 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.612446 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.612467 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.612503 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.612524 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:04Z","lastTransitionTime":"2025-10-03T08:42:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.714827 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.714864 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.714873 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.714885 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.714914 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:04Z","lastTransitionTime":"2025-10-03T08:42:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.818044 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.818088 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.818097 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.818111 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.818120 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:04Z","lastTransitionTime":"2025-10-03T08:42:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.920050 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.920084 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.920091 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.920104 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:04 crc kubenswrapper[4899]: I1003 08:42:04.920112 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:04Z","lastTransitionTime":"2025-10-03T08:42:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.021531 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.021574 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.021587 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.021603 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.021620 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:05Z","lastTransitionTime":"2025-10-03T08:42:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.123780 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.123820 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.123830 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.123845 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.123856 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:05Z","lastTransitionTime":"2025-10-03T08:42:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.226072 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.226122 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.226137 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.226152 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.226162 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:05Z","lastTransitionTime":"2025-10-03T08:42:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.328970 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.329008 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.329017 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.329030 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.329038 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:05Z","lastTransitionTime":"2025-10-03T08:42:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.432792 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.432862 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.432875 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.432935 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.432952 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:05Z","lastTransitionTime":"2025-10-03T08:42:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.526999 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.527024 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:42:05 crc kubenswrapper[4899]: E1003 08:42:05.527229 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.527055 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.527037 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:42:05 crc kubenswrapper[4899]: E1003 08:42:05.527323 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:42:05 crc kubenswrapper[4899]: E1003 08:42:05.527423 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:42:05 crc kubenswrapper[4899]: E1003 08:42:05.527584 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.536058 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.536117 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.536131 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.536155 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.536171 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:05Z","lastTransitionTime":"2025-10-03T08:42:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.638315 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.638380 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.638395 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.638456 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.638468 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:05Z","lastTransitionTime":"2025-10-03T08:42:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.741265 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.741313 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.741322 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.741340 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.741351 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:05Z","lastTransitionTime":"2025-10-03T08:42:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.844321 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.844383 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.844397 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.844417 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.844430 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:05Z","lastTransitionTime":"2025-10-03T08:42:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.947161 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.947203 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.947211 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.947225 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:05 crc kubenswrapper[4899]: I1003 08:42:05.947235 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:05Z","lastTransitionTime":"2025-10-03T08:42:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.049938 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.050008 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.050020 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.050039 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.050051 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:06Z","lastTransitionTime":"2025-10-03T08:42:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.160031 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.160108 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.160121 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.160147 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.160163 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:06Z","lastTransitionTime":"2025-10-03T08:42:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.262036 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.262069 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.262078 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.262090 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.262099 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:06Z","lastTransitionTime":"2025-10-03T08:42:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.364938 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.364982 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.364993 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.365009 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.365018 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:06Z","lastTransitionTime":"2025-10-03T08:42:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.468062 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.468119 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.468133 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.468151 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.468162 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:06Z","lastTransitionTime":"2025-10-03T08:42:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.541394 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abf96c2b-051f-4527-9f0d-151989a33188\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d801be103806be8311a255c0d0799db042325fd87e9105f803c3ce7800fb542b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076b5207a85e9b10c8dffc1af725bc002e746033c82ae8c741a8648afd6a9273\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://33f44571c4d7fafd549b68c67a99a8184a24546dffb48f928fb107e1ed14f31c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01b2e775f31ff86e2bdf71d26b943ca28183fcdb6d23696c9c68a9bf28b774d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1fe1407c9312db659d4289d32fdf5bfd80746f250fa9c7758de0becdaf53e2a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 08:40:45.329695 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 08:40:45.329823 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 08:40:45.332599 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2545462435/tls.crt::/tmp/serving-cert-2545462435/tls.key\\\\\\\"\\\\nI1003 08:40:45.630347 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 08:40:45.633364 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 08:40:45.633394 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 08:40:45.633418 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 08:40:45.633425 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 08:40:45.640534 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 08:40:45.640548 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 08:40:45.640574 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640579 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 08:40:45.640583 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 08:40:45.640586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 08:40:45.640588 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 08:40:45.640591 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 08:40:45.642455 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e80ca51b1f7654bb2affa8a3b4789a4c7d4897b443f2b238bb3df05f622b4bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c4259b49fd048c82440ac3c8372f1b03fc98287a69a93e63bd51b0bdf285276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:42:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.552697 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d6a18d6-f866-4b43-8f83-9daffdf790ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c29dcc38fc5dcdb7bac6aecc64729e3cae174d28ea684345b519e2a7249ce460\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e383a195efca0d8c804fad47e9bde436f4c520714753e50d4cd4a4607efd073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58fa7c1ac8a7e0b388660be3ec487ca3e31be7950d3480c1d05ff68095aa2fb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde76b46f3d2390a6aee29739cfcfbea2de4d2d8ac899c0edd13ea27fceced2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:42:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.564002 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7939788716e17e117234b8a1435b67bf56a552fd651c499ebbd508142dd3717e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-03T08:42:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.570630 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.570660 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.570670 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.570684 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.570694 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:06Z","lastTransitionTime":"2025-10-03T08:42:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.580730 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"764a7341-6f52-4fc1-9086-87b90aa126e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4902f94e75c5598a2e34e736116f11cd1a207d74
97c9e340a39f9e913b8ce1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:41:48Z\\\",\\\"message\\\":\\\"rvices.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/certified-operators_TCP_cluster\\\\\\\", UUID:\\\\\\\"20da2226-531c-4179-9810-aa4026995ca3\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/certified-operators\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/certified-operators_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/certified-operators\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.214\\\\\\\", Port:50051, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1003 08:41:48.251140 7043 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:41:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-g7f7c_openshift-ovn-kubernetes(764a7341-6f52-4fc1-9086-87b90aa126e8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7fgv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:42:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.597923 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8b2c8335-6dc8-4f78-9954-11cc14b67a96\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e2cb122797729e37384443595011ec18ce7e8666b461e43f9c4e03504474b795\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbca5c4e351b7ac0d1626e29
a903cfce19e178b5066c568b7f5ae1adbe1649a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2411b656b734b1df8d0758579ba689f874307d86e3f8b5a61e95d8c4945c3dd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6dc91195ca643523a395a1df1205f27ea6c689b4417eea7b63979fc4c611ebed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c5818c44f66f9b78d76df2e5512225f75faed42cb78d2e8c45f1e0d5dfe768\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f8aa155ea687fce4c2d31b71921835abd2cce4e22801400914a2fb98f573ba1\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f8aa155ea687fce4c2d31b71921835abd2cce4e22801400914a2fb98f573ba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c0af6b6a558fd2aa16cedbc2a364531f8a578517bd5e100686759206300555b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c0af6b6a558fd2aa16cedbc2a364531f8a578517bd5e100686759206300555b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://34f2fc29bb339c15e73731f1ffa6d4dd1a64a742ec75024a04c96e8957691fd4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34f2fc29bb339c15e73731f1ffa6d4dd1a64a742ec75024a04c96e8957691fd4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:42:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.608398 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jpgn4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4478ebbd-6973-4ba3-a95a-311406b51cdf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261258171b79be4d546afedf590a115cf92e1ffad9dd461f216f399185442ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v7vb9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jpgn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:42:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.619635 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"59642124-309c-4d11-9965-c47f9b123e27\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eab4f364e3ec7f70e2276e192d8d4c2c6cc8e2822f4d7ac12f6198f935798bf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd7564933d473a51ceb1289e19cf7ea17049e9cbc134796f287686797157bac2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bf29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hrq4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:42:06Z is after 2025-08-24T17:21:41Z" Oct 03 
08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.635174 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ebdae06ad165b5f168e7edd4d8afa1d602ba1134ac11810abd545c83a3b2fce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e165c02402cd2261d5eca12655302573a2157ddbcffca37cfecf78a253705e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:42:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.648357 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:42:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.662231 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgdhq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f75d8d8-3b12-42bf-b447-0afb4413fd54\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2f7940cc405b005b23f102def919753d7820ecc4e45db9f25cb87611611f4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T08:41:33Z\\\",\\\"message\\\":\\\"2025-10-03T08:40:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_49c5c875-7012-4f94-a45d-72a046fb0f24\\\\n2025-10-03T08:40:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_49c5c875-7012-4f94-a45d-72a046fb0f24 to /host/opt/cni/bin/\\\\n2025-10-03T08:40:48Z [verbose] multus-daemon started\\\\n2025-10-03T08:40:48Z [verbose] Readiness Indicator file check\\\\n2025-10-03T08:41:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:41:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kxvgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgdhq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:42:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.673238 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.673288 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.673298 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.673316 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.673327 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:06Z","lastTransitionTime":"2025-10-03T08:42:06Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.677800 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d4e5d0f-c610-483a-a7e0-92c39dce1b12\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12e7158ae8abfde3e873066a70183f684e88d20db37ccd5a4e5251be59f1d27e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b4a94f350de3680d004c16a3f5b6696821ce2d36353aa24e6473f6e9642d7a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a546f95358c452d60d106835bb48a0a00e208529a41967ce0dc2b944d676fa3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7dbb6cc1dd4155ac5ab6c2e80b31bb3d0267b8a68595d70120ffaf84033f06bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbf9017681aa2794ee3e393c7b8988d012259c3f089f331d564ca7037b88c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:50Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e0670e53b95c73d96266022971a702c9881bf638b810dfdb7ffe94ac8d30fbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cce72e4a4f3c428f67c1ac0baa0bded7c35ab762d228c15fdf45fee1a45a28f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twt24\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wxhwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-03T08:42:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.687885 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cb66b7b-06a4-4abf-844a-9dde8b2050e7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f3d24871e87681e41b4247d18fbe1f63d9f6c5d9b2d4e70062453716a55026f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba16d72a68e73e000a876907e9dea4e5ad713c3bf0b4b6478d4fde2efedda458\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba16d72a68e73e000a876907e9dea4e5ad713c3bf0b4b6478d4fde2efedda458\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:42:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 
08:42:06.700811 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:42:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.712273 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:42:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.724759 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b8be799fc3405b370193225e6139c0dfad7c46aa3d56942d97e69697ad91e97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:42:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.736405 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s9sv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab573102-ad81-4abc-ad0d-c002c08bc84c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4f9b985a1977ffce38409783941cea460e62c66d5b7d1e59eed82b274fde81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g5szw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s9sv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:42:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.748254 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3e8a7198-81da-475c-ac88-a460ba4064d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac2ef0658dda46f18217531efabe32e2d13a2462dc3f021015b8add3a1c159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqmnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-t2h4g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:42:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.759174 4899 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-ldv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27fd79a9-c016-46aa-8b67-446a831eb2d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c5pv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:41:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ldv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:42:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.771863 4899 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9385ff69-ac63-4dac-b8ba-67bcfd9e9d41\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:41:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T08:40:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2890c36988f29fbea57dfd712a1d66304622780d233996abe521a7a8bf0a932d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://306605d9d3b9065f7c546feb2478296ab23df5ee17f88e9732b78584614b2c3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54093b84153ff7894489025339a6977db0f46b4d2a3b49cc60077af28010d315\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T08:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b658507fbaf0ea4f98774e10f9bf7c34ad6dd9fffef18057d09c287f217091d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b658507fbaf0ea4f98774e10f9bf7c34ad6dd9fffef18057d09c287f217091d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T08:40:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T08:40:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T08:40:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T08:42:06Z is after 2025-08-24T17:21:41Z" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.775839 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.775882 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.775917 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.775935 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.775947 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:06Z","lastTransitionTime":"2025-10-03T08:42:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.878958 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.879026 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.879038 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.879055 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.879069 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:06Z","lastTransitionTime":"2025-10-03T08:42:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.984158 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.984192 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.984202 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.984222 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:06 crc kubenswrapper[4899]: I1003 08:42:06.984231 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:06Z","lastTransitionTime":"2025-10-03T08:42:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.086878 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.086934 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.086945 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.086961 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.086973 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:07Z","lastTransitionTime":"2025-10-03T08:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.189001 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.189037 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.189045 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.189058 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.189067 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:07Z","lastTransitionTime":"2025-10-03T08:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.291155 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.291195 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.291206 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.291226 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.291239 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:07Z","lastTransitionTime":"2025-10-03T08:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.393221 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.393259 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.393269 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.393284 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.393297 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:07Z","lastTransitionTime":"2025-10-03T08:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.495808 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.495856 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.495873 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.495921 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.495939 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:07Z","lastTransitionTime":"2025-10-03T08:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.526171 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.526207 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.526364 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.526376 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:42:07 crc kubenswrapper[4899]: E1003 08:42:07.526466 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:42:07 crc kubenswrapper[4899]: E1003 08:42:07.526590 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:42:07 crc kubenswrapper[4899]: E1003 08:42:07.526658 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:42:07 crc kubenswrapper[4899]: E1003 08:42:07.526709 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.598203 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.598249 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.598257 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.598270 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.598281 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:07Z","lastTransitionTime":"2025-10-03T08:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.700484 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.700532 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.700544 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.700561 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.700574 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:07Z","lastTransitionTime":"2025-10-03T08:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.802496 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.802543 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.802552 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.802566 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.802577 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:07Z","lastTransitionTime":"2025-10-03T08:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.904803 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.904852 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.904867 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.904887 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:07 crc kubenswrapper[4899]: I1003 08:42:07.904943 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:07Z","lastTransitionTime":"2025-10-03T08:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.007137 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.007191 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.007205 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.007224 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.007236 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:08Z","lastTransitionTime":"2025-10-03T08:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.109642 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.109692 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.109739 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.109757 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.109769 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:08Z","lastTransitionTime":"2025-10-03T08:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.212602 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.212647 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.212656 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.212673 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.212682 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:08Z","lastTransitionTime":"2025-10-03T08:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.314392 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.314428 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.314437 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.314450 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.314459 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:08Z","lastTransitionTime":"2025-10-03T08:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.416672 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.416716 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.416727 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.416744 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.416754 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:08Z","lastTransitionTime":"2025-10-03T08:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.519683 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.519742 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.519756 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.519773 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.519784 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:08Z","lastTransitionTime":"2025-10-03T08:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.622566 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.622608 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.622621 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.622634 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.622642 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:08Z","lastTransitionTime":"2025-10-03T08:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.724302 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.724340 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.724349 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.724363 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.724372 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:08Z","lastTransitionTime":"2025-10-03T08:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.826018 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.826068 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.826079 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.826093 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.826104 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:08Z","lastTransitionTime":"2025-10-03T08:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.928158 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.928185 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.928193 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.928206 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:08 crc kubenswrapper[4899]: I1003 08:42:08.928215 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:08Z","lastTransitionTime":"2025-10-03T08:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.030665 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.030700 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.030710 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.030722 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.030732 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:09Z","lastTransitionTime":"2025-10-03T08:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.132833 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.132870 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.132882 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.132901 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.132934 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:09Z","lastTransitionTime":"2025-10-03T08:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.235048 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.235090 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.235114 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.235132 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.235142 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:09Z","lastTransitionTime":"2025-10-03T08:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.338537 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.338593 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.338606 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.338624 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.338640 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:09Z","lastTransitionTime":"2025-10-03T08:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.441487 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.441524 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.441534 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.441547 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.441554 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:09Z","lastTransitionTime":"2025-10-03T08:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.526846 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.527200 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.527211 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.527235 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:42:09 crc kubenswrapper[4899]: E1003 08:42:09.527362 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:42:09 crc kubenswrapper[4899]: E1003 08:42:09.527451 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:42:09 crc kubenswrapper[4899]: E1003 08:42:09.527505 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:42:09 crc kubenswrapper[4899]: E1003 08:42:09.527549 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.543342 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.543364 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.543375 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.543389 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.543397 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:09Z","lastTransitionTime":"2025-10-03T08:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.646039 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.646068 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.646076 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.646088 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.646096 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:09Z","lastTransitionTime":"2025-10-03T08:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.748558 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.748618 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.748627 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.748645 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.748654 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:09Z","lastTransitionTime":"2025-10-03T08:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.851632 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.851941 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.852173 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.852370 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.852571 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:09Z","lastTransitionTime":"2025-10-03T08:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.954298 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.954336 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.954346 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.954361 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:09 crc kubenswrapper[4899]: I1003 08:42:09.954371 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:09Z","lastTransitionTime":"2025-10-03T08:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.056976 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.057242 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.057350 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.057432 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.057498 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:10Z","lastTransitionTime":"2025-10-03T08:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.162195 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.162253 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.162265 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.162336 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.162349 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:10Z","lastTransitionTime":"2025-10-03T08:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.264475 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.264513 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.264522 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.264537 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.264545 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:10Z","lastTransitionTime":"2025-10-03T08:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.367511 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.367579 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.367594 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.367618 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.367632 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:10Z","lastTransitionTime":"2025-10-03T08:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.471801 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.471860 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.471874 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.471921 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.471938 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:10Z","lastTransitionTime":"2025-10-03T08:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.575144 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.575206 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.575218 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.575242 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.575257 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:10Z","lastTransitionTime":"2025-10-03T08:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.678398 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.678483 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.678506 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.678539 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.678563 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:10Z","lastTransitionTime":"2025-10-03T08:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.781935 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.782005 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.782020 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.782041 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.782056 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:10Z","lastTransitionTime":"2025-10-03T08:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.885121 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.885176 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.885185 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.885201 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.885210 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:10Z","lastTransitionTime":"2025-10-03T08:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.988484 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.988529 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.988541 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.988558 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:10 crc kubenswrapper[4899]: I1003 08:42:10.988568 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:10Z","lastTransitionTime":"2025-10-03T08:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.091855 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.091934 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.091944 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.091962 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.091974 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:11Z","lastTransitionTime":"2025-10-03T08:42:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.195189 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.195245 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.195259 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.195281 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.195294 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:11Z","lastTransitionTime":"2025-10-03T08:42:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.297459 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.297503 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.297515 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.297528 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.297536 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:11Z","lastTransitionTime":"2025-10-03T08:42:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.399540 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.399586 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.399597 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.399613 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.399623 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:11Z","lastTransitionTime":"2025-10-03T08:42:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.501317 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.501352 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.501360 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.501403 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.501413 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:11Z","lastTransitionTime":"2025-10-03T08:42:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.526737 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.526810 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:42:11 crc kubenswrapper[4899]: E1003 08:42:11.526871 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.526948 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.527034 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:42:11 crc kubenswrapper[4899]: E1003 08:42:11.527079 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:42:11 crc kubenswrapper[4899]: E1003 08:42:11.527254 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:42:11 crc kubenswrapper[4899]: E1003 08:42:11.527483 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.603505 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.603538 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.603553 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.603567 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.603577 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:11Z","lastTransitionTime":"2025-10-03T08:42:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.705202 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.705237 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.705246 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.705260 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.705268 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:11Z","lastTransitionTime":"2025-10-03T08:42:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.807521 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.807588 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.807598 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.807623 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.807632 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:11Z","lastTransitionTime":"2025-10-03T08:42:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.910986 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.911054 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.911069 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.911090 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:11 crc kubenswrapper[4899]: I1003 08:42:11.911109 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:11Z","lastTransitionTime":"2025-10-03T08:42:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.013473 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.013518 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.013530 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.013545 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.013556 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:12Z","lastTransitionTime":"2025-10-03T08:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.115810 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.115864 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.115876 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.115909 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.115918 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:12Z","lastTransitionTime":"2025-10-03T08:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.218022 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.218062 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.218076 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.218094 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.218107 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:12Z","lastTransitionTime":"2025-10-03T08:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.321123 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.321172 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.321183 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.321198 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.321208 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:12Z","lastTransitionTime":"2025-10-03T08:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.423794 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.423826 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.423833 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.423846 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.423856 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:12Z","lastTransitionTime":"2025-10-03T08:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.525803 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.525838 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.525847 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.525861 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.525870 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:12Z","lastTransitionTime":"2025-10-03T08:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.628221 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.628261 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.628271 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.628286 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.628297 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:12Z","lastTransitionTime":"2025-10-03T08:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.730607 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.730654 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.730664 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.730678 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.730686 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:12Z","lastTransitionTime":"2025-10-03T08:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.832837 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.832873 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.832889 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.832923 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.832935 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:12Z","lastTransitionTime":"2025-10-03T08:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.935244 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.935281 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.935293 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.935311 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:12 crc kubenswrapper[4899]: I1003 08:42:12.935322 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:12Z","lastTransitionTime":"2025-10-03T08:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.037161 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.037203 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.037212 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.037224 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.037233 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:13Z","lastTransitionTime":"2025-10-03T08:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.139750 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.139785 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.139797 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.139811 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.139824 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:13Z","lastTransitionTime":"2025-10-03T08:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.241694 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.241732 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.241741 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.241756 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.241766 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:13Z","lastTransitionTime":"2025-10-03T08:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.344075 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.344104 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.344117 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.344138 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.344149 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:13Z","lastTransitionTime":"2025-10-03T08:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.446493 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.446530 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.446540 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.446555 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.446565 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:13Z","lastTransitionTime":"2025-10-03T08:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.526291 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.526319 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:42:13 crc kubenswrapper[4899]: E1003 08:42:13.526969 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.526478 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:42:13 crc kubenswrapper[4899]: E1003 08:42:13.526993 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.526460 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:42:13 crc kubenswrapper[4899]: E1003 08:42:13.527030 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:42:13 crc kubenswrapper[4899]: E1003 08:42:13.527078 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.548493 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.548517 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.548529 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.548541 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.548550 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:13Z","lastTransitionTime":"2025-10-03T08:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.650675 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.650713 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.650721 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.650736 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.650744 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:13Z","lastTransitionTime":"2025-10-03T08:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.752873 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.752925 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.752938 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.752953 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.752964 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:13Z","lastTransitionTime":"2025-10-03T08:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.855013 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.855050 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.855058 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.855072 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.855082 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:13Z","lastTransitionTime":"2025-10-03T08:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.957473 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.957516 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.957526 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.957547 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:13 crc kubenswrapper[4899]: I1003 08:42:13.957559 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:13Z","lastTransitionTime":"2025-10-03T08:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.058227 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.058291 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.058300 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.058315 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.058324 4899 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T08:42:14Z","lastTransitionTime":"2025-10-03T08:42:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.103775 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-zq5vw"] Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.104416 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zq5vw" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.106790 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.106850 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.109994 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.110678 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.133936 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=23.133886177 podStartE2EDuration="23.133886177s" podCreationTimestamp="2025-10-03 08:41:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:14.122579591 +0000 UTC m=+108.230064544" watchObservedRunningTime="2025-10-03 08:42:14.133886177 +0000 UTC m=+108.241371130" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.162061 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-wxhwc" podStartSLOduration=88.162044933 podStartE2EDuration="1m28.162044933s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:14.160970169 +0000 UTC m=+108.268455122" watchObservedRunningTime="2025-10-03 08:42:14.162044933 +0000 UTC m=+108.269529896" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.162316 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-pgdhq" podStartSLOduration=88.162308082 podStartE2EDuration="1m28.162308082s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:14.146741361 +0000 UTC m=+108.254226324" watchObservedRunningTime="2025-10-03 08:42:14.162308082 +0000 UTC m=+108.269793045" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.173689 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=58.173673929 podStartE2EDuration="58.173673929s" podCreationTimestamp="2025-10-03 08:41:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:14.173532365 +0000 UTC m=+108.281017328" watchObservedRunningTime="2025-10-03 08:42:14.173673929 +0000 UTC m=+108.281158882" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.220999 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-s9sv8" podStartSLOduration=89.220978717 podStartE2EDuration="1m29.220978717s" podCreationTimestamp="2025-10-03 08:40:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:14.220949307 +0000 UTC m=+108.328434260" watchObservedRunningTime="2025-10-03 08:42:14.220978717 +0000 UTC m=+108.328463670" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.231528 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podStartSLOduration=88.231506129 podStartE2EDuration="1m28.231506129s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:14.231404116 +0000 UTC m=+108.338889069" watchObservedRunningTime="2025-10-03 08:42:14.231506129 +0000 UTC m=+108.338991082" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.262308 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0143c1d9-6500-442b-8905-18711a88fb28-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-zq5vw\" (UID: \"0143c1d9-6500-442b-8905-18711a88fb28\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zq5vw" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.262369 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/0143c1d9-6500-442b-8905-18711a88fb28-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-zq5vw\" (UID: \"0143c1d9-6500-442b-8905-18711a88fb28\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zq5vw" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.262392 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/0143c1d9-6500-442b-8905-18711a88fb28-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-zq5vw\" (UID: \"0143c1d9-6500-442b-8905-18711a88fb28\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zq5vw" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.262422 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0143c1d9-6500-442b-8905-18711a88fb28-service-ca\") pod \"cluster-version-operator-5c965bbfc6-zq5vw\" (UID: \"0143c1d9-6500-442b-8905-18711a88fb28\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zq5vw" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.262455 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0143c1d9-6500-442b-8905-18711a88fb28-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-zq5vw\" (UID: \"0143c1d9-6500-442b-8905-18711a88fb28\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zq5vw" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.279092 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=18.279073246 podStartE2EDuration="18.279073246s" podCreationTimestamp="2025-10-03 08:41:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:14.274303235 +0000 UTC m=+108.381788198" 
watchObservedRunningTime="2025-10-03 08:42:14.279073246 +0000 UTC m=+108.386558209" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.292071 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=89.292056755 podStartE2EDuration="1m29.292056755s" podCreationTimestamp="2025-10-03 08:40:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:14.291893599 +0000 UTC m=+108.399378552" watchObservedRunningTime="2025-10-03 08:42:14.292056755 +0000 UTC m=+108.399541698" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.305791 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=89.305773806 podStartE2EDuration="1m29.305773806s" podCreationTimestamp="2025-10-03 08:40:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:14.305158457 +0000 UTC m=+108.412643400" watchObservedRunningTime="2025-10-03 08:42:14.305773806 +0000 UTC m=+108.413258759" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.358030 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-jpgn4" podStartSLOduration=88.358013341 podStartE2EDuration="1m28.358013341s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:14.357435502 +0000 UTC m=+108.464920455" watchObservedRunningTime="2025-10-03 08:42:14.358013341 +0000 UTC m=+108.465498294" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.363824 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0143c1d9-6500-442b-8905-18711a88fb28-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-zq5vw\" (UID: \"0143c1d9-6500-442b-8905-18711a88fb28\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zq5vw" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.363865 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/0143c1d9-6500-442b-8905-18711a88fb28-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-zq5vw\" (UID: \"0143c1d9-6500-442b-8905-18711a88fb28\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zq5vw" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.363916 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/0143c1d9-6500-442b-8905-18711a88fb28-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-zq5vw\" (UID: \"0143c1d9-6500-442b-8905-18711a88fb28\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zq5vw" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.363960 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0143c1d9-6500-442b-8905-18711a88fb28-service-ca\") pod \"cluster-version-operator-5c965bbfc6-zq5vw\" (UID: \"0143c1d9-6500-442b-8905-18711a88fb28\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zq5vw" Oct 
03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.363993 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0143c1d9-6500-442b-8905-18711a88fb28-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-zq5vw\" (UID: \"0143c1d9-6500-442b-8905-18711a88fb28\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zq5vw" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.364024 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/0143c1d9-6500-442b-8905-18711a88fb28-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-zq5vw\" (UID: \"0143c1d9-6500-442b-8905-18711a88fb28\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zq5vw" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.364066 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/0143c1d9-6500-442b-8905-18711a88fb28-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-zq5vw\" (UID: \"0143c1d9-6500-442b-8905-18711a88fb28\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zq5vw" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.364817 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0143c1d9-6500-442b-8905-18711a88fb28-service-ca\") pod \"cluster-version-operator-5c965bbfc6-zq5vw\" (UID: \"0143c1d9-6500-442b-8905-18711a88fb28\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zq5vw" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.369091 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0143c1d9-6500-442b-8905-18711a88fb28-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-zq5vw\" (UID: \"0143c1d9-6500-442b-8905-18711a88fb28\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zq5vw" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.380016 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0143c1d9-6500-442b-8905-18711a88fb28-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-zq5vw\" (UID: \"0143c1d9-6500-442b-8905-18711a88fb28\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zq5vw" Oct 03 08:42:14 crc kubenswrapper[4899]: I1003 08:42:14.430029 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zq5vw" Oct 03 08:42:15 crc kubenswrapper[4899]: I1003 08:42:15.023133 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zq5vw" event={"ID":"0143c1d9-6500-442b-8905-18711a88fb28","Type":"ContainerStarted","Data":"49500bebb4d5806ee4631744fcbade9291b80f48ed35e682e521920d0267a5ff"} Oct 03 08:42:15 crc kubenswrapper[4899]: I1003 08:42:15.023182 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zq5vw" event={"ID":"0143c1d9-6500-442b-8905-18711a88fb28","Type":"ContainerStarted","Data":"d2da55db0eda85a94c69a0a89f5a712251b572c5b839f639f564f02c07042bd8"} Oct 03 08:42:15 crc kubenswrapper[4899]: I1003 08:42:15.036002 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hrq4x" podStartSLOduration=89.035985968 podStartE2EDuration="1m29.035985968s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:14.368573403 +0000 UTC m=+108.476058356" watchObservedRunningTime="2025-10-03 08:42:15.035985968 +0000 UTC m=+109.143470921" Oct 03 08:42:15 crc kubenswrapper[4899]: I1003 08:42:15.036129 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zq5vw" podStartSLOduration=89.036125762 podStartE2EDuration="1m29.036125762s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:15.035233874 +0000 UTC m=+109.142718827" watchObservedRunningTime="2025-10-03 08:42:15.036125762 +0000 UTC m=+109.143610715" Oct 03 08:42:15 crc kubenswrapper[4899]: I1003 08:42:15.526296 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:42:15 crc kubenswrapper[4899]: E1003 08:42:15.526394 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:42:15 crc kubenswrapper[4899]: I1003 08:42:15.526438 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:42:15 crc kubenswrapper[4899]: I1003 08:42:15.526755 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:42:15 crc kubenswrapper[4899]: E1003 08:42:15.526951 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:42:15 crc kubenswrapper[4899]: I1003 08:42:15.526992 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:42:15 crc kubenswrapper[4899]: I1003 08:42:15.527184 4899 scope.go:117] "RemoveContainer" containerID="4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee" Oct 03 08:42:15 crc kubenswrapper[4899]: E1003 08:42:15.527056 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:42:15 crc kubenswrapper[4899]: E1003 08:42:15.527319 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-g7f7c_openshift-ovn-kubernetes(764a7341-6f52-4fc1-9086-87b90aa126e8)\"" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" Oct 03 08:42:15 crc kubenswrapper[4899]: E1003 08:42:15.527384 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:42:17 crc kubenswrapper[4899]: I1003 08:42:17.526470 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:42:17 crc kubenswrapper[4899]: I1003 08:42:17.526526 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:42:17 crc kubenswrapper[4899]: I1003 08:42:17.526585 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:42:17 crc kubenswrapper[4899]: E1003 08:42:17.526586 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:42:17 crc kubenswrapper[4899]: E1003 08:42:17.526641 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:42:17 crc kubenswrapper[4899]: E1003 08:42:17.526689 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:42:17 crc kubenswrapper[4899]: I1003 08:42:17.526699 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:42:17 crc kubenswrapper[4899]: E1003 08:42:17.526860 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:42:19 crc kubenswrapper[4899]: I1003 08:42:19.526354 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:42:19 crc kubenswrapper[4899]: I1003 08:42:19.526444 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:42:19 crc kubenswrapper[4899]: I1003 08:42:19.526373 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:42:19 crc kubenswrapper[4899]: E1003 08:42:19.526498 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:42:19 crc kubenswrapper[4899]: E1003 08:42:19.526573 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:42:19 crc kubenswrapper[4899]: E1003 08:42:19.526603 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:42:19 crc kubenswrapper[4899]: I1003 08:42:19.526608 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:42:19 crc kubenswrapper[4899]: E1003 08:42:19.526658 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:42:20 crc kubenswrapper[4899]: I1003 08:42:20.038578 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pgdhq_6f75d8d8-3b12-42bf-b447-0afb4413fd54/kube-multus/1.log" Oct 03 08:42:20 crc kubenswrapper[4899]: I1003 08:42:20.039099 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pgdhq_6f75d8d8-3b12-42bf-b447-0afb4413fd54/kube-multus/0.log" Oct 03 08:42:20 crc kubenswrapper[4899]: I1003 08:42:20.039148 4899 generic.go:334] "Generic (PLEG): container finished" podID="6f75d8d8-3b12-42bf-b447-0afb4413fd54" containerID="b2f7940cc405b005b23f102def919753d7820ecc4e45db9f25cb87611611f4dc" exitCode=1 Oct 03 08:42:20 crc kubenswrapper[4899]: I1003 08:42:20.039182 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pgdhq" event={"ID":"6f75d8d8-3b12-42bf-b447-0afb4413fd54","Type":"ContainerDied","Data":"b2f7940cc405b005b23f102def919753d7820ecc4e45db9f25cb87611611f4dc"} Oct 03 08:42:20 crc kubenswrapper[4899]: I1003 08:42:20.039221 4899 scope.go:117] "RemoveContainer" containerID="f3896e828005b0f9f9227fbd8b21b7e59cca713f408cebbeb60920e31c64458f" Oct 03 08:42:20 crc kubenswrapper[4899]: I1003 08:42:20.039792 4899 scope.go:117] "RemoveContainer" containerID="b2f7940cc405b005b23f102def919753d7820ecc4e45db9f25cb87611611f4dc" Oct 03 08:42:20 crc kubenswrapper[4899]: E1003 08:42:20.039974 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-pgdhq_openshift-multus(6f75d8d8-3b12-42bf-b447-0afb4413fd54)\"" pod="openshift-multus/multus-pgdhq" podUID="6f75d8d8-3b12-42bf-b447-0afb4413fd54" Oct 03 08:42:21 crc kubenswrapper[4899]: I1003 08:42:21.043437 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pgdhq_6f75d8d8-3b12-42bf-b447-0afb4413fd54/kube-multus/1.log" Oct 03 08:42:21 crc kubenswrapper[4899]: I1003 08:42:21.526185 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:42:21 crc kubenswrapper[4899]: I1003 08:42:21.526247 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:42:21 crc kubenswrapper[4899]: I1003 08:42:21.526184 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:42:21 crc kubenswrapper[4899]: E1003 08:42:21.526308 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:42:21 crc kubenswrapper[4899]: E1003 08:42:21.526366 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:42:21 crc kubenswrapper[4899]: I1003 08:42:21.526198 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:42:21 crc kubenswrapper[4899]: E1003 08:42:21.526465 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:42:21 crc kubenswrapper[4899]: E1003 08:42:21.526599 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:42:23 crc kubenswrapper[4899]: I1003 08:42:23.526174 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:42:23 crc kubenswrapper[4899]: I1003 08:42:23.526202 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:42:23 crc kubenswrapper[4899]: I1003 08:42:23.526284 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:42:23 crc kubenswrapper[4899]: E1003 08:42:23.526325 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:42:23 crc kubenswrapper[4899]: I1003 08:42:23.526365 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:42:23 crc kubenswrapper[4899]: E1003 08:42:23.526524 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:42:23 crc kubenswrapper[4899]: E1003 08:42:23.526750 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:42:23 crc kubenswrapper[4899]: E1003 08:42:23.526775 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:42:25 crc kubenswrapper[4899]: I1003 08:42:25.526297 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:42:25 crc kubenswrapper[4899]: I1003 08:42:25.526428 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:42:25 crc kubenswrapper[4899]: I1003 08:42:25.526456 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:42:25 crc kubenswrapper[4899]: E1003 08:42:25.526681 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:42:25 crc kubenswrapper[4899]: I1003 08:42:25.526946 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:42:25 crc kubenswrapper[4899]: E1003 08:42:25.527068 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:42:25 crc kubenswrapper[4899]: E1003 08:42:25.527189 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:42:25 crc kubenswrapper[4899]: E1003 08:42:25.527338 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:42:26 crc kubenswrapper[4899]: E1003 08:42:26.584309 4899 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Oct 03 08:42:26 crc kubenswrapper[4899]: E1003 08:42:26.620912 4899 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 03 08:42:27 crc kubenswrapper[4899]: I1003 08:42:27.526489 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:42:27 crc kubenswrapper[4899]: I1003 08:42:27.526554 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:42:27 crc kubenswrapper[4899]: I1003 08:42:27.526580 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:42:27 crc kubenswrapper[4899]: E1003 08:42:27.526685 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:42:27 crc kubenswrapper[4899]: E1003 08:42:27.526726 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:42:27 crc kubenswrapper[4899]: E1003 08:42:27.526771 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:42:27 crc kubenswrapper[4899]: I1003 08:42:27.527031 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:42:27 crc kubenswrapper[4899]: E1003 08:42:27.527231 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:42:29 crc kubenswrapper[4899]: I1003 08:42:29.526315 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:42:29 crc kubenswrapper[4899]: I1003 08:42:29.526407 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:42:29 crc kubenswrapper[4899]: E1003 08:42:29.526440 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:42:29 crc kubenswrapper[4899]: I1003 08:42:29.526334 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:42:29 crc kubenswrapper[4899]: I1003 08:42:29.526515 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:42:29 crc kubenswrapper[4899]: E1003 08:42:29.526713 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:42:29 crc kubenswrapper[4899]: E1003 08:42:29.527060 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:42:29 crc kubenswrapper[4899]: E1003 08:42:29.527176 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:42:29 crc kubenswrapper[4899]: I1003 08:42:29.527218 4899 scope.go:117] "RemoveContainer" containerID="4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee" Oct 03 08:42:30 crc kubenswrapper[4899]: I1003 08:42:30.074308 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f7c_764a7341-6f52-4fc1-9086-87b90aa126e8/ovnkube-controller/3.log" Oct 03 08:42:30 crc kubenswrapper[4899]: I1003 08:42:30.077096 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" event={"ID":"764a7341-6f52-4fc1-9086-87b90aa126e8","Type":"ContainerStarted","Data":"e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331"} Oct 03 08:42:30 crc kubenswrapper[4899]: I1003 08:42:30.077623 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:42:30 crc kubenswrapper[4899]: I1003 08:42:30.103027 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" podStartSLOduration=104.10301122 podStartE2EDuration="1m44.10301122s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:30.102612698 +0000 UTC m=+124.210097661" watchObservedRunningTime="2025-10-03 08:42:30.10301122 +0000 UTC m=+124.210496173" Oct 03 08:42:30 crc kubenswrapper[4899]: I1003 08:42:30.303715 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-ldv5d"] Oct 03 08:42:30 crc kubenswrapper[4899]: I1003 08:42:30.303829 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:42:30 crc kubenswrapper[4899]: E1003 08:42:30.303947 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:42:31 crc kubenswrapper[4899]: I1003 08:42:31.526390 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:42:31 crc kubenswrapper[4899]: I1003 08:42:31.526434 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:42:31 crc kubenswrapper[4899]: E1003 08:42:31.526498 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:42:31 crc kubenswrapper[4899]: I1003 08:42:31.526390 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:42:31 crc kubenswrapper[4899]: E1003 08:42:31.526554 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:42:31 crc kubenswrapper[4899]: E1003 08:42:31.526698 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:42:31 crc kubenswrapper[4899]: E1003 08:42:31.622663 4899 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 03 08:42:32 crc kubenswrapper[4899]: I1003 08:42:32.526763 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:42:32 crc kubenswrapper[4899]: E1003 08:42:32.527164 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:42:33 crc kubenswrapper[4899]: I1003 08:42:33.526758 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:42:33 crc kubenswrapper[4899]: I1003 08:42:33.526829 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:42:33 crc kubenswrapper[4899]: I1003 08:42:33.526784 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:42:33 crc kubenswrapper[4899]: E1003 08:42:33.526938 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:42:33 crc kubenswrapper[4899]: E1003 08:42:33.527014 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:42:33 crc kubenswrapper[4899]: E1003 08:42:33.527110 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:42:34 crc kubenswrapper[4899]: I1003 08:42:34.526174 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:42:34 crc kubenswrapper[4899]: I1003 08:42:34.526305 4899 scope.go:117] "RemoveContainer" containerID="b2f7940cc405b005b23f102def919753d7820ecc4e45db9f25cb87611611f4dc" Oct 03 08:42:34 crc kubenswrapper[4899]: E1003 08:42:34.526453 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:42:35 crc kubenswrapper[4899]: I1003 08:42:35.090963 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pgdhq_6f75d8d8-3b12-42bf-b447-0afb4413fd54/kube-multus/1.log" Oct 03 08:42:35 crc kubenswrapper[4899]: I1003 08:42:35.091014 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pgdhq" event={"ID":"6f75d8d8-3b12-42bf-b447-0afb4413fd54","Type":"ContainerStarted","Data":"46a6450698daebd9728ad0b6a8bde26f5c4b700e695dc1dc1f7d33663c96b564"} Oct 03 08:42:35 crc kubenswrapper[4899]: I1003 08:42:35.526395 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:42:35 crc kubenswrapper[4899]: I1003 08:42:35.526457 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:42:35 crc kubenswrapper[4899]: I1003 08:42:35.526413 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:42:35 crc kubenswrapper[4899]: E1003 08:42:35.526537 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 08:42:35 crc kubenswrapper[4899]: E1003 08:42:35.526619 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 08:42:35 crc kubenswrapper[4899]: E1003 08:42:35.526781 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 08:42:36 crc kubenswrapper[4899]: I1003 08:42:36.526274 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:42:36 crc kubenswrapper[4899]: E1003 08:42:36.527386 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ldv5d" podUID="27fd79a9-c016-46aa-8b67-446a831eb2d8" Oct 03 08:42:37 crc kubenswrapper[4899]: I1003 08:42:37.429434 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:42:37 crc kubenswrapper[4899]: I1003 08:42:37.526755 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:42:37 crc kubenswrapper[4899]: I1003 08:42:37.526792 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:42:37 crc kubenswrapper[4899]: I1003 08:42:37.526862 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:42:37 crc kubenswrapper[4899]: I1003 08:42:37.528589 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Oct 03 08:42:37 crc kubenswrapper[4899]: I1003 08:42:37.528594 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Oct 03 08:42:37 crc kubenswrapper[4899]: I1003 08:42:37.529073 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Oct 03 08:42:37 crc kubenswrapper[4899]: I1003 08:42:37.529079 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Oct 03 08:42:38 crc kubenswrapper[4899]: I1003 08:42:38.526757 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:42:38 crc kubenswrapper[4899]: I1003 08:42:38.528962 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Oct 03 08:42:38 crc kubenswrapper[4899]: I1003 08:42:38.529169 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.366263 4899 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.397282 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-lw5xr"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.398109 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-lw5xr" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.398608 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-v5crb"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.399287 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.400094 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-hbstw"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.400610 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.400760 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.401696 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.403055 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.403999 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-k28dm"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.404650 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-k28dm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.408023 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-xg5bn"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.418310 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.418386 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-xg5bn" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.419131 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.419153 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.419180 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.419567 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.419628 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.419628 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.419837 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.420057 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.420308 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.420321 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.420460 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-462ht"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.420511 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.420529 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.420664 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.420856 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2ghv5"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.420950 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.420974 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.421094 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z2lfm"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.421123 4899 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.421176 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.421238 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.421317 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.421349 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z2lfm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.421439 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.421124 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.421627 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.421705 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.421732 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2ghv5" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.421829 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.421710 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-462ht" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.422060 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.422193 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.422409 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.422599 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.423036 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.423045 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.423111 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.424755 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55cqf"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.425185 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-b8pqc"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.425516 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b8pqc" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.425524 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-kzhtk"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.425751 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55cqf" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.426161 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.426367 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-kzhtk" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.428949 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-scmz7"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.429654 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-llzvf"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.430219 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-llzvf" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.430642 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-scmz7" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.431021 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.432271 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.435786 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.435957 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-kgddg"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.436797 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-kgddg" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.437280 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-pmzmr"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.437787 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-pmzmr" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.438779 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-tkpkx"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.439277 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.440715 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-dpwsf"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.441168 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-dpwsf" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.441924 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zqzww"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.455119 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.456551 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zqzww" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.457661 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.457794 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.457982 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.458000 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.458040 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.458178 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l5vs7"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.458380 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.458648 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.458752 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.458995 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.460137 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.460404 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.462115 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.464424 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478010 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-console-serving-cert\") pod \"console-f9d7485db-k28dm\" (UID: \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\") " pod="openshift-console/console-f9d7485db-k28dm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478074 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/dd179c6a-42d1-41bb-a584-dd843ec84bb4-metrics-tls\") pod \"dns-operator-744455d44c-462ht\" (UID: \"dd179c6a-42d1-41bb-a584-dd843ec84bb4\") " pod="openshift-dns-operator/dns-operator-744455d44c-462ht" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478101 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/d7f325c2-54b2-42b3-bcab-466aa71dc831-etcd-serving-ca\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478124 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1c74fca8-3a6a-4253-9284-dd6417e8420a-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-xg5bn\" (UID: \"1c74fca8-3a6a-4253-9284-dd6417e8420a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xg5bn" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478144 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1c74fca8-3a6a-4253-9284-dd6417e8420a-service-ca-bundle\") pod \"authentication-operator-69f744f599-xg5bn\" (UID: \"1c74fca8-3a6a-4253-9284-dd6417e8420a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xg5bn" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478162 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d7f325c2-54b2-42b3-bcab-466aa71dc831-audit-dir\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478181 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19c551e7-757b-4136-a143-6b6aa8152c57-serving-cert\") pod \"controller-manager-879f6c89f-hbstw\" (UID: \"19c551e7-757b-4136-a143-6b6aa8152c57\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478206 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a5a7bb2d-e3f4-4c2e-9d78-483724280890-serving-cert\") pod 
\"route-controller-manager-6576b87f9c-dtsxp\" (UID: \"a5a7bb2d-e3f4-4c2e-9d78-483724280890\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478224 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9q4p\" (UniqueName: \"kubernetes.io/projected/a8b9468d-675b-42d9-b5e8-b45f5d35deef-kube-api-access-q9q4p\") pod \"machine-api-operator-5694c8668f-lw5xr\" (UID: \"a8b9468d-675b-42d9-b5e8-b45f5d35deef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lw5xr" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478242 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478246 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5a7bb2d-e3f4-4c2e-9d78-483724280890-config\") pod \"route-controller-manager-6576b87f9c-dtsxp\" (UID: \"a5a7bb2d-e3f4-4c2e-9d78-483724280890\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478431 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/7fa6a508-d3a9-44de-a87a-4a5c474ad589-machine-approver-tls\") pod \"machine-approver-56656f9798-b8pqc\" (UID: \"7fa6a508-d3a9-44de-a87a-4a5c474ad589\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b8pqc" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478453 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lw5j8\" (UniqueName: \"kubernetes.io/projected/ed905c2a-053e-4094-bb3b-91412f1ec0f8-kube-api-access-lw5j8\") pod \"cluster-samples-operator-665b6dd947-55cqf\" (UID: \"ed905c2a-053e-4094-bb3b-91412f1ec0f8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55cqf" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478472 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-trusted-ca-bundle\") pod \"console-f9d7485db-k28dm\" (UID: \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\") " pod="openshift-console/console-f9d7485db-k28dm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478488 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c6e656f-7683-4fe1-9a4f-00885aa67657-config\") pod \"openshift-apiserver-operator-796bbdcf4f-2ghv5\" (UID: \"4c6e656f-7683-4fe1-9a4f-00885aa67657\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2ghv5" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478520 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e8f9f430-36b0-475f-830b-cf5e0c84ed59-trusted-ca\") pod \"console-operator-58897d9998-kzhtk\" (UID: \"e8f9f430-36b0-475f-830b-cf5e0c84ed59\") " pod="openshift-console-operator/console-operator-58897d9998-kzhtk" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478541 4899 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6t5k\" (UniqueName: \"kubernetes.io/projected/19c551e7-757b-4136-a143-6b6aa8152c57-kube-api-access-c6t5k\") pod \"controller-manager-879f6c89f-hbstw\" (UID: \"19c551e7-757b-4136-a143-6b6aa8152c57\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478555 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a8b9468d-675b-42d9-b5e8-b45f5d35deef-config\") pod \"machine-api-operator-5694c8668f-lw5xr\" (UID: \"a8b9468d-675b-42d9-b5e8-b45f5d35deef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lw5xr" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478578 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bz5m\" (UniqueName: \"kubernetes.io/projected/4c6e656f-7683-4fe1-9a4f-00885aa67657-kube-api-access-6bz5m\") pod \"openshift-apiserver-operator-796bbdcf4f-2ghv5\" (UID: \"4c6e656f-7683-4fe1-9a4f-00885aa67657\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2ghv5" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478596 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-service-ca\") pod \"console-f9d7485db-k28dm\" (UID: \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\") " pod="openshift-console/console-f9d7485db-k28dm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478613 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zl7zj\" (UniqueName: \"kubernetes.io/projected/a5a7bb2d-e3f4-4c2e-9d78-483724280890-kube-api-access-zl7zj\") pod \"route-controller-manager-6576b87f9c-dtsxp\" (UID: \"a5a7bb2d-e3f4-4c2e-9d78-483724280890\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478629 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/a8b9468d-675b-42d9-b5e8-b45f5d35deef-images\") pod \"machine-api-operator-5694c8668f-lw5xr\" (UID: \"a8b9468d-675b-42d9-b5e8-b45f5d35deef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lw5xr" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478644 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-console-oauth-config\") pod \"console-f9d7485db-k28dm\" (UID: \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\") " pod="openshift-console/console-f9d7485db-k28dm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478662 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzj8t\" (UniqueName: \"kubernetes.io/projected/e8f9f430-36b0-475f-830b-cf5e0c84ed59-kube-api-access-fzj8t\") pod \"console-operator-58897d9998-kzhtk\" (UID: \"e8f9f430-36b0-475f-830b-cf5e0c84ed59\") " pod="openshift-console-operator/console-operator-58897d9998-kzhtk" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478678 4899 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d7f325c2-54b2-42b3-bcab-466aa71dc831-trusted-ca-bundle\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478693 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztswx\" (UniqueName: \"kubernetes.io/projected/d7f325c2-54b2-42b3-bcab-466aa71dc831-kube-api-access-ztswx\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478721 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxfjk\" (UniqueName: \"kubernetes.io/projected/dd179c6a-42d1-41bb-a584-dd843ec84bb4-kube-api-access-kxfjk\") pod \"dns-operator-744455d44c-462ht\" (UID: \"dd179c6a-42d1-41bb-a584-dd843ec84bb4\") " pod="openshift-dns-operator/dns-operator-744455d44c-462ht" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478757 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34cebeb0-e452-40a4-9c67-43353a244e15-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-z2lfm\" (UID: \"34cebeb0-e452-40a4-9c67-43353a244e15\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z2lfm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478777 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c74fca8-3a6a-4253-9284-dd6417e8420a-config\") pod \"authentication-operator-69f744f599-xg5bn\" (UID: \"1c74fca8-3a6a-4253-9284-dd6417e8420a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xg5bn" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478797 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8f9f430-36b0-475f-830b-cf5e0c84ed59-config\") pod \"console-operator-58897d9998-kzhtk\" (UID: \"e8f9f430-36b0-475f-830b-cf5e0c84ed59\") " pod="openshift-console-operator/console-operator-58897d9998-kzhtk" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478819 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7f325c2-54b2-42b3-bcab-466aa71dc831-config\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478838 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/d7f325c2-54b2-42b3-bcab-466aa71dc831-encryption-config\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478854 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: 
\"kubernetes.io/configmap/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-console-config\") pod \"console-f9d7485db-k28dm\" (UID: \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\") " pod="openshift-console/console-f9d7485db-k28dm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478876 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a5a7bb2d-e3f4-4c2e-9d78-483724280890-client-ca\") pod \"route-controller-manager-6576b87f9c-dtsxp\" (UID: \"a5a7bb2d-e3f4-4c2e-9d78-483724280890\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478906 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8swk\" (UniqueName: \"kubernetes.io/projected/7fa6a508-d3a9-44de-a87a-4a5c474ad589-kube-api-access-x8swk\") pod \"machine-approver-56656f9798-b8pqc\" (UID: \"7fa6a508-d3a9-44de-a87a-4a5c474ad589\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b8pqc" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478926 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c6e656f-7683-4fe1-9a4f-00885aa67657-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-2ghv5\" (UID: \"4c6e656f-7683-4fe1-9a4f-00885aa67657\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2ghv5" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478942 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d7f325c2-54b2-42b3-bcab-466aa71dc831-node-pullsecrets\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.478982 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7fa6a508-d3a9-44de-a87a-4a5c474ad589-auth-proxy-config\") pod \"machine-approver-56656f9798-b8pqc\" (UID: \"7fa6a508-d3a9-44de-a87a-4a5c474ad589\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b8pqc" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.479010 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19c551e7-757b-4136-a143-6b6aa8152c57-config\") pod \"controller-manager-879f6c89f-hbstw\" (UID: \"19c551e7-757b-4136-a143-6b6aa8152c57\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.479033 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/34cebeb0-e452-40a4-9c67-43353a244e15-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-z2lfm\" (UID: \"34cebeb0-e452-40a4-9c67-43353a244e15\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z2lfm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.479055 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/7fa6a508-d3a9-44de-a87a-4a5c474ad589-config\") pod \"machine-approver-56656f9798-b8pqc\" (UID: \"7fa6a508-d3a9-44de-a87a-4a5c474ad589\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b8pqc" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.479074 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/ed905c2a-053e-4094-bb3b-91412f1ec0f8-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-55cqf\" (UID: \"ed905c2a-053e-4094-bb3b-91412f1ec0f8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55cqf" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.479091 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/19c551e7-757b-4136-a143-6b6aa8152c57-client-ca\") pod \"controller-manager-879f6c89f-hbstw\" (UID: \"19c551e7-757b-4136-a143-6b6aa8152c57\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.479109 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/19c551e7-757b-4136-a143-6b6aa8152c57-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-hbstw\" (UID: \"19c551e7-757b-4136-a143-6b6aa8152c57\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.479125 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e8f9f430-36b0-475f-830b-cf5e0c84ed59-serving-cert\") pod \"console-operator-58897d9998-kzhtk\" (UID: \"e8f9f430-36b0-475f-830b-cf5e0c84ed59\") " pod="openshift-console-operator/console-operator-58897d9998-kzhtk" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.479145 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnqdg\" (UniqueName: \"kubernetes.io/projected/34cebeb0-e452-40a4-9c67-43353a244e15-kube-api-access-xnqdg\") pod \"openshift-controller-manager-operator-756b6f6bc6-z2lfm\" (UID: \"34cebeb0-e452-40a4-9c67-43353a244e15\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z2lfm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.479161 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d7f325c2-54b2-42b3-bcab-466aa71dc831-etcd-client\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.479174 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1c74fca8-3a6a-4253-9284-dd6417e8420a-serving-cert\") pod \"authentication-operator-69f744f599-xg5bn\" (UID: \"1c74fca8-3a6a-4253-9284-dd6417e8420a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xg5bn" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.479189 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" 
(UniqueName: \"kubernetes.io/configmap/d7f325c2-54b2-42b3-bcab-466aa71dc831-image-import-ca\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.479207 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6nbs\" (UniqueName: \"kubernetes.io/projected/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-kube-api-access-s6nbs\") pod \"console-f9d7485db-k28dm\" (UID: \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\") " pod="openshift-console/console-f9d7485db-k28dm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.479223 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/a8b9468d-675b-42d9-b5e8-b45f5d35deef-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-lw5xr\" (UID: \"a8b9468d-675b-42d9-b5e8-b45f5d35deef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lw5xr" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.479240 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rsg4r\" (UniqueName: \"kubernetes.io/projected/1c74fca8-3a6a-4253-9284-dd6417e8420a-kube-api-access-rsg4r\") pod \"authentication-operator-69f744f599-xg5bn\" (UID: \"1c74fca8-3a6a-4253-9284-dd6417e8420a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xg5bn" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.479254 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-oauth-serving-cert\") pod \"console-f9d7485db-k28dm\" (UID: \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\") " pod="openshift-console/console-f9d7485db-k28dm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.479286 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/d7f325c2-54b2-42b3-bcab-466aa71dc831-audit\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.479318 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d7f325c2-54b2-42b3-bcab-466aa71dc831-serving-cert\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.479960 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.480222 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.480362 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.480489 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Oct 03 08:42:44 crc 
kubenswrapper[4899]: I1003 08:42:44.480722 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.480921 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.481017 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.481247 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.481340 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.481357 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.481404 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.481495 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.481543 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.481564 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.481256 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.481509 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.481723 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.481749 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.481783 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.481868 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.481974 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.481991 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.482084 4899 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-etcd-operator"/"kube-root-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.482093 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.482178 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.482187 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.482464 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.481727 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.482553 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.482620 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.482686 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.482759 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.483263 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.483366 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.483455 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.483542 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.483627 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.486571 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.486817 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.489281 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.489451 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 
08:42:44.489843 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.490118 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.490259 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.490329 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.490394 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.490498 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.495720 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wmlxz"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.496142 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-nrt8l"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.496760 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-kb4vv"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.497133 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.497180 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wmlxz" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.497343 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-f8jvt"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.497559 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-nrt8l" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.497709 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-brt2c"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.497773 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-f8jvt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.497767 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kb4vv" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.498347 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.498591 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-tfknb"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.498754 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-brt2c" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.503300 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.507347 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.510143 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t9nkq"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.510561 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-mwh8q"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.511166 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mwh8q" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.511537 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-tfknb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.511746 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t9nkq" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.514944 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5j62"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.515641 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5j62" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.520845 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5mh7r"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.521729 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5mh7r" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.524098 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4cz5w"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.524999 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4cz5w" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.532120 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.532454 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.532981 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.533116 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.576841 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583249 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8f9f430-36b0-475f-830b-cf5e0c84ed59-config\") pod \"console-operator-58897d9998-kzhtk\" (UID: \"e8f9f430-36b0-475f-830b-cf5e0c84ed59\") " pod="openshift-console-operator/console-operator-58897d9998-kzhtk" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583292 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34cebeb0-e452-40a4-9c67-43353a244e15-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-z2lfm\" (UID: \"34cebeb0-e452-40a4-9c67-43353a244e15\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z2lfm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583314 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c74fca8-3a6a-4253-9284-dd6417e8420a-config\") pod \"authentication-operator-69f744f599-xg5bn\" (UID: \"1c74fca8-3a6a-4253-9284-dd6417e8420a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xg5bn" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583345 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7f325c2-54b2-42b3-bcab-466aa71dc831-config\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583377 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/d7f325c2-54b2-42b3-bcab-466aa71dc831-encryption-config\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583397 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-console-config\") pod \"console-f9d7485db-k28dm\" (UID: \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\") " pod="openshift-console/console-f9d7485db-k28dm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583418 4899 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8swk\" (UniqueName: \"kubernetes.io/projected/7fa6a508-d3a9-44de-a87a-4a5c474ad589-kube-api-access-x8swk\") pod \"machine-approver-56656f9798-b8pqc\" (UID: \"7fa6a508-d3a9-44de-a87a-4a5c474ad589\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b8pqc" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583441 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a5a7bb2d-e3f4-4c2e-9d78-483724280890-client-ca\") pod \"route-controller-manager-6576b87f9c-dtsxp\" (UID: \"a5a7bb2d-e3f4-4c2e-9d78-483724280890\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583460 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c6e656f-7683-4fe1-9a4f-00885aa67657-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-2ghv5\" (UID: \"4c6e656f-7683-4fe1-9a4f-00885aa67657\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2ghv5" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583486 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7fa6a508-d3a9-44de-a87a-4a5c474ad589-auth-proxy-config\") pod \"machine-approver-56656f9798-b8pqc\" (UID: \"7fa6a508-d3a9-44de-a87a-4a5c474ad589\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b8pqc" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583507 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19c551e7-757b-4136-a143-6b6aa8152c57-config\") pod \"controller-manager-879f6c89f-hbstw\" (UID: \"19c551e7-757b-4136-a143-6b6aa8152c57\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583527 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d7f325c2-54b2-42b3-bcab-466aa71dc831-node-pullsecrets\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583546 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/34cebeb0-e452-40a4-9c67-43353a244e15-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-z2lfm\" (UID: \"34cebeb0-e452-40a4-9c67-43353a244e15\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z2lfm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583565 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fa6a508-d3a9-44de-a87a-4a5c474ad589-config\") pod \"machine-approver-56656f9798-b8pqc\" (UID: \"7fa6a508-d3a9-44de-a87a-4a5c474ad589\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b8pqc" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583588 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/ed905c2a-053e-4094-bb3b-91412f1ec0f8-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-55cqf\" (UID: \"ed905c2a-053e-4094-bb3b-91412f1ec0f8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55cqf" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583618 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/19c551e7-757b-4136-a143-6b6aa8152c57-client-ca\") pod \"controller-manager-879f6c89f-hbstw\" (UID: \"19c551e7-757b-4136-a143-6b6aa8152c57\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583640 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/19c551e7-757b-4136-a143-6b6aa8152c57-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-hbstw\" (UID: \"19c551e7-757b-4136-a143-6b6aa8152c57\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583660 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e8f9f430-36b0-475f-830b-cf5e0c84ed59-serving-cert\") pod \"console-operator-58897d9998-kzhtk\" (UID: \"e8f9f430-36b0-475f-830b-cf5e0c84ed59\") " pod="openshift-console-operator/console-operator-58897d9998-kzhtk" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583683 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnqdg\" (UniqueName: \"kubernetes.io/projected/34cebeb0-e452-40a4-9c67-43353a244e15-kube-api-access-xnqdg\") pod \"openshift-controller-manager-operator-756b6f6bc6-z2lfm\" (UID: \"34cebeb0-e452-40a4-9c67-43353a244e15\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z2lfm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583712 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1bf00f2e-253d-4ce1-afeb-e559137c9488-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-zqzww\" (UID: \"1bf00f2e-253d-4ce1-afeb-e559137c9488\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zqzww" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583736 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d7f325c2-54b2-42b3-bcab-466aa71dc831-etcd-client\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583757 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf00f2e-253d-4ce1-afeb-e559137c9488-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-zqzww\" (UID: \"1bf00f2e-253d-4ce1-afeb-e559137c9488\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zqzww" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583780 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1c74fca8-3a6a-4253-9284-dd6417e8420a-serving-cert\") pod 
\"authentication-operator-69f744f599-xg5bn\" (UID: \"1c74fca8-3a6a-4253-9284-dd6417e8420a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xg5bn" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583802 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prlbd\" (UniqueName: \"kubernetes.io/projected/1e4eab70-7ebf-4f07-883b-8d08e0453ec1-kube-api-access-prlbd\") pod \"cluster-image-registry-operator-dc59b4c8b-llzvf\" (UID: \"1e4eab70-7ebf-4f07-883b-8d08e0453ec1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-llzvf" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583826 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/d7f325c2-54b2-42b3-bcab-466aa71dc831-image-import-ca\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583847 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6nbs\" (UniqueName: \"kubernetes.io/projected/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-kube-api-access-s6nbs\") pod \"console-f9d7485db-k28dm\" (UID: \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\") " pod="openshift-console/console-f9d7485db-k28dm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583871 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf00f2e-253d-4ce1-afeb-e559137c9488-config\") pod \"kube-apiserver-operator-766d6c64bb-zqzww\" (UID: \"1bf00f2e-253d-4ce1-afeb-e559137c9488\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zqzww" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583912 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/a8b9468d-675b-42d9-b5e8-b45f5d35deef-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-lw5xr\" (UID: \"a8b9468d-675b-42d9-b5e8-b45f5d35deef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lw5xr" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583937 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rsg4r\" (UniqueName: \"kubernetes.io/projected/1c74fca8-3a6a-4253-9284-dd6417e8420a-kube-api-access-rsg4r\") pod \"authentication-operator-69f744f599-xg5bn\" (UID: \"1c74fca8-3a6a-4253-9284-dd6417e8420a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xg5bn" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583961 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/d7f325c2-54b2-42b3-bcab-466aa71dc831-audit\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.583981 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-oauth-serving-cert\") pod \"console-f9d7485db-k28dm\" (UID: \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\") " pod="openshift-console/console-f9d7485db-k28dm" Oct 03 
08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584009 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d7f325c2-54b2-42b3-bcab-466aa71dc831-serving-cert\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584034 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1e4eab70-7ebf-4f07-883b-8d08e0453ec1-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-llzvf\" (UID: \"1e4eab70-7ebf-4f07-883b-8d08e0453ec1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-llzvf" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584076 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-console-serving-cert\") pod \"console-f9d7485db-k28dm\" (UID: \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\") " pod="openshift-console/console-f9d7485db-k28dm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584099 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/1e4eab70-7ebf-4f07-883b-8d08e0453ec1-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-llzvf\" (UID: \"1e4eab70-7ebf-4f07-883b-8d08e0453ec1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-llzvf" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584124 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1c74fca8-3a6a-4253-9284-dd6417e8420a-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-xg5bn\" (UID: \"1c74fca8-3a6a-4253-9284-dd6417e8420a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xg5bn" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584146 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/dd179c6a-42d1-41bb-a584-dd843ec84bb4-metrics-tls\") pod \"dns-operator-744455d44c-462ht\" (UID: \"dd179c6a-42d1-41bb-a584-dd843ec84bb4\") " pod="openshift-dns-operator/dns-operator-744455d44c-462ht" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584166 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/d7f325c2-54b2-42b3-bcab-466aa71dc831-etcd-serving-ca\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584190 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1c74fca8-3a6a-4253-9284-dd6417e8420a-service-ca-bundle\") pod \"authentication-operator-69f744f599-xg5bn\" (UID: \"1c74fca8-3a6a-4253-9284-dd6417e8420a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xg5bn" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584210 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d7f325c2-54b2-42b3-bcab-466aa71dc831-audit-dir\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584232 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19c551e7-757b-4136-a143-6b6aa8152c57-serving-cert\") pod \"controller-manager-879f6c89f-hbstw\" (UID: \"19c551e7-757b-4136-a143-6b6aa8152c57\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584257 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5a7bb2d-e3f4-4c2e-9d78-483724280890-config\") pod \"route-controller-manager-6576b87f9c-dtsxp\" (UID: \"a5a7bb2d-e3f4-4c2e-9d78-483724280890\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584277 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a5a7bb2d-e3f4-4c2e-9d78-483724280890-serving-cert\") pod \"route-controller-manager-6576b87f9c-dtsxp\" (UID: \"a5a7bb2d-e3f4-4c2e-9d78-483724280890\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584299 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9q4p\" (UniqueName: \"kubernetes.io/projected/a8b9468d-675b-42d9-b5e8-b45f5d35deef-kube-api-access-q9q4p\") pod \"machine-api-operator-5694c8668f-lw5xr\" (UID: \"a8b9468d-675b-42d9-b5e8-b45f5d35deef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lw5xr" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584332 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/7fa6a508-d3a9-44de-a87a-4a5c474ad589-machine-approver-tls\") pod \"machine-approver-56656f9798-b8pqc\" (UID: \"7fa6a508-d3a9-44de-a87a-4a5c474ad589\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b8pqc" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584354 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lw5j8\" (UniqueName: \"kubernetes.io/projected/ed905c2a-053e-4094-bb3b-91412f1ec0f8-kube-api-access-lw5j8\") pod \"cluster-samples-operator-665b6dd947-55cqf\" (UID: \"ed905c2a-053e-4094-bb3b-91412f1ec0f8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55cqf" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584376 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-trusted-ca-bundle\") pod \"console-f9d7485db-k28dm\" (UID: \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\") " pod="openshift-console/console-f9d7485db-k28dm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584395 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c6e656f-7683-4fe1-9a4f-00885aa67657-config\") pod \"openshift-apiserver-operator-796bbdcf4f-2ghv5\" (UID: 
\"4c6e656f-7683-4fe1-9a4f-00885aa67657\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2ghv5" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584416 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e8f9f430-36b0-475f-830b-cf5e0c84ed59-trusted-ca\") pod \"console-operator-58897d9998-kzhtk\" (UID: \"e8f9f430-36b0-475f-830b-cf5e0c84ed59\") " pod="openshift-console-operator/console-operator-58897d9998-kzhtk" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584437 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6t5k\" (UniqueName: \"kubernetes.io/projected/19c551e7-757b-4136-a143-6b6aa8152c57-kube-api-access-c6t5k\") pod \"controller-manager-879f6c89f-hbstw\" (UID: \"19c551e7-757b-4136-a143-6b6aa8152c57\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584456 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a8b9468d-675b-42d9-b5e8-b45f5d35deef-config\") pod \"machine-api-operator-5694c8668f-lw5xr\" (UID: \"a8b9468d-675b-42d9-b5e8-b45f5d35deef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lw5xr" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584480 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a9c7a81-f2a5-48a4-a202-008603e07184-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-r5j62\" (UID: \"1a9c7a81-f2a5-48a4-a202-008603e07184\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5j62" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584504 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qjjz\" (UniqueName: \"kubernetes.io/projected/1a9c7a81-f2a5-48a4-a202-008603e07184-kube-api-access-4qjjz\") pod \"kube-storage-version-migrator-operator-b67b599dd-r5j62\" (UID: \"1a9c7a81-f2a5-48a4-a202-008603e07184\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5j62" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584527 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-service-ca\") pod \"console-f9d7485db-k28dm\" (UID: \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\") " pod="openshift-console/console-f9d7485db-k28dm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584550 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bz5m\" (UniqueName: \"kubernetes.io/projected/4c6e656f-7683-4fe1-9a4f-00885aa67657-kube-api-access-6bz5m\") pod \"openshift-apiserver-operator-796bbdcf4f-2ghv5\" (UID: \"4c6e656f-7683-4fe1-9a4f-00885aa67657\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2ghv5" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584574 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a9c7a81-f2a5-48a4-a202-008603e07184-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-r5j62\" (UID: 
\"1a9c7a81-f2a5-48a4-a202-008603e07184\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5j62" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584600 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1e4eab70-7ebf-4f07-883b-8d08e0453ec1-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-llzvf\" (UID: \"1e4eab70-7ebf-4f07-883b-8d08e0453ec1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-llzvf" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584629 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zl7zj\" (UniqueName: \"kubernetes.io/projected/a5a7bb2d-e3f4-4c2e-9d78-483724280890-kube-api-access-zl7zj\") pod \"route-controller-manager-6576b87f9c-dtsxp\" (UID: \"a5a7bb2d-e3f4-4c2e-9d78-483724280890\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584653 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/a8b9468d-675b-42d9-b5e8-b45f5d35deef-images\") pod \"machine-api-operator-5694c8668f-lw5xr\" (UID: \"a8b9468d-675b-42d9-b5e8-b45f5d35deef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lw5xr" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584676 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-console-oauth-config\") pod \"console-f9d7485db-k28dm\" (UID: \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\") " pod="openshift-console/console-f9d7485db-k28dm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584700 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzj8t\" (UniqueName: \"kubernetes.io/projected/e8f9f430-36b0-475f-830b-cf5e0c84ed59-kube-api-access-fzj8t\") pod \"console-operator-58897d9998-kzhtk\" (UID: \"e8f9f430-36b0-475f-830b-cf5e0c84ed59\") " pod="openshift-console-operator/console-operator-58897d9998-kzhtk" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584721 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d7f325c2-54b2-42b3-bcab-466aa71dc831-trusted-ca-bundle\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584742 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztswx\" (UniqueName: \"kubernetes.io/projected/d7f325c2-54b2-42b3-bcab-466aa71dc831-kube-api-access-ztswx\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.584765 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxfjk\" (UniqueName: \"kubernetes.io/projected/dd179c6a-42d1-41bb-a584-dd843ec84bb4-kube-api-access-kxfjk\") pod \"dns-operator-744455d44c-462ht\" (UID: \"dd179c6a-42d1-41bb-a584-dd843ec84bb4\") " pod="openshift-dns-operator/dns-operator-744455d44c-462ht" Oct 03 08:42:44 crc 
kubenswrapper[4899]: I1003 08:42:44.585603 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.590263 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.592533 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-rmv8r"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.592706 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7f325c2-54b2-42b3-bcab-466aa71dc831-config\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.593060 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-hbstw"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.593078 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-np5qp"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.593372 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-lw5xr"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.593386 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-k95tt"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.593680 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8f9f430-36b0-475f-830b-cf5e0c84ed59-config\") pod \"console-operator-58897d9998-kzhtk\" (UID: \"e8f9f430-36b0-475f-830b-cf5e0c84ed59\") " pod="openshift-console-operator/console-operator-58897d9998-kzhtk" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.593853 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fa6a508-d3a9-44de-a87a-4a5c474ad589-config\") pod \"machine-approver-56656f9798-b8pqc\" (UID: \"7fa6a508-d3a9-44de-a87a-4a5c474ad589\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b8pqc" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.593957 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324670-2j7rr"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.594244 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-w4cgg"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.594321 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d7f325c2-54b2-42b3-bcab-466aa71dc831-etcd-client\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.594357 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34cebeb0-e452-40a4-9c67-43353a244e15-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-z2lfm\" (UID: 
\"34cebeb0-e452-40a4-9c67-43353a244e15\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z2lfm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.594644 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mcjm4"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.595172 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-csm2k"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.595435 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-service-ca\") pod \"console-f9d7485db-k28dm\" (UID: \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\") " pod="openshift-console/console-f9d7485db-k28dm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.595481 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-462ht"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.595567 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-csm2k" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.595785 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-rmv8r" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.596113 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-np5qp" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.596327 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k95tt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.596431 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/a8b9468d-675b-42d9-b5e8-b45f5d35deef-images\") pod \"machine-api-operator-5694c8668f-lw5xr\" (UID: \"a8b9468d-675b-42d9-b5e8-b45f5d35deef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lw5xr" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.596639 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324670-2j7rr" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.596820 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-console-config\") pod \"console-f9d7485db-k28dm\" (UID: \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\") " pod="openshift-console/console-f9d7485db-k28dm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.597264 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/19c551e7-757b-4136-a143-6b6aa8152c57-client-ca\") pod \"controller-manager-879f6c89f-hbstw\" (UID: \"19c551e7-757b-4136-a143-6b6aa8152c57\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.597339 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-xg5bn"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.597602 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c74fca8-3a6a-4253-9284-dd6417e8420a-config\") pod \"authentication-operator-69f744f599-xg5bn\" (UID: \"1c74fca8-3a6a-4253-9284-dd6417e8420a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xg5bn" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.597622 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d7f325c2-54b2-42b3-bcab-466aa71dc831-trusted-ca-bundle\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.597780 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/19c551e7-757b-4136-a143-6b6aa8152c57-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-hbstw\" (UID: \"19c551e7-757b-4136-a143-6b6aa8152c57\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.598247 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7fa6a508-d3a9-44de-a87a-4a5c474ad589-auth-proxy-config\") pod \"machine-approver-56656f9798-b8pqc\" (UID: \"7fa6a508-d3a9-44de-a87a-4a5c474ad589\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b8pqc" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.599722 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-k28dm"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.601785 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d7f325c2-54b2-42b3-bcab-466aa71dc831-node-pullsecrets\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.601800 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19c551e7-757b-4136-a143-6b6aa8152c57-config\") pod 
\"controller-manager-879f6c89f-hbstw\" (UID: \"19c551e7-757b-4136-a143-6b6aa8152c57\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.602371 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/d7f325c2-54b2-42b3-bcab-466aa71dc831-audit\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.602380 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e8f9f430-36b0-475f-830b-cf5e0c84ed59-serving-cert\") pod \"console-operator-58897d9998-kzhtk\" (UID: \"e8f9f430-36b0-475f-830b-cf5e0c84ed59\") " pod="openshift-console-operator/console-operator-58897d9998-kzhtk" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.602780 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-trusted-ca-bundle\") pod \"console-f9d7485db-k28dm\" (UID: \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\") " pod="openshift-console/console-f9d7485db-k28dm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.603117 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e8f9f430-36b0-475f-830b-cf5e0c84ed59-trusted-ca\") pod \"console-operator-58897d9998-kzhtk\" (UID: \"e8f9f430-36b0-475f-830b-cf5e0c84ed59\") " pod="openshift-console-operator/console-operator-58897d9998-kzhtk" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.603164 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-oauth-serving-cert\") pod \"console-f9d7485db-k28dm\" (UID: \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\") " pod="openshift-console/console-f9d7485db-k28dm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.603310 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/d7f325c2-54b2-42b3-bcab-466aa71dc831-etcd-serving-ca\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.603359 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d7f325c2-54b2-42b3-bcab-466aa71dc831-audit-dir\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.603924 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a8b9468d-675b-42d9-b5e8-b45f5d35deef-config\") pod \"machine-api-operator-5694c8668f-lw5xr\" (UID: \"a8b9468d-675b-42d9-b5e8-b45f5d35deef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lw5xr" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.604760 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c6e656f-7683-4fe1-9a4f-00885aa67657-config\") pod \"openshift-apiserver-operator-796bbdcf4f-2ghv5\" (UID: 
\"4c6e656f-7683-4fe1-9a4f-00885aa67657\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2ghv5" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.604814 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-kgddg"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.604827 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/d7f325c2-54b2-42b3-bcab-466aa71dc831-image-import-ca\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.604838 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1c74fca8-3a6a-4253-9284-dd6417e8420a-serving-cert\") pod \"authentication-operator-69f744f599-xg5bn\" (UID: \"1c74fca8-3a6a-4253-9284-dd6417e8420a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xg5bn" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.604923 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1c74fca8-3a6a-4253-9284-dd6417e8420a-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-xg5bn\" (UID: \"1c74fca8-3a6a-4253-9284-dd6417e8420a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xg5bn" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.605163 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5a7bb2d-e3f4-4c2e-9d78-483724280890-config\") pod \"route-controller-manager-6576b87f9c-dtsxp\" (UID: \"a5a7bb2d-e3f4-4c2e-9d78-483724280890\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.605254 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/d7f325c2-54b2-42b3-bcab-466aa71dc831-encryption-config\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.605499 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1c74fca8-3a6a-4253-9284-dd6417e8420a-service-ca-bundle\") pod \"authentication-operator-69f744f599-xg5bn\" (UID: \"1c74fca8-3a6a-4253-9284-dd6417e8420a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xg5bn" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.605600 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-w4cgg" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.605682 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mcjm4" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.606242 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a5a7bb2d-e3f4-4c2e-9d78-483724280890-client-ca\") pod \"route-controller-manager-6576b87f9c-dtsxp\" (UID: \"a5a7bb2d-e3f4-4c2e-9d78-483724280890\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.607064 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-v5crb"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.608987 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.610587 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d7f325c2-54b2-42b3-bcab-466aa71dc831-serving-cert\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.611041 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z2lfm"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.611468 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/dd179c6a-42d1-41bb-a584-dd843ec84bb4-metrics-tls\") pod \"dns-operator-744455d44c-462ht\" (UID: \"dd179c6a-42d1-41bb-a584-dd843ec84bb4\") " pod="openshift-dns-operator/dns-operator-744455d44c-462ht" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.611505 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-console-oauth-config\") pod \"console-f9d7485db-k28dm\" (UID: \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\") " pod="openshift-console/console-f9d7485db-k28dm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.612654 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/a8b9468d-675b-42d9-b5e8-b45f5d35deef-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-lw5xr\" (UID: \"a8b9468d-675b-42d9-b5e8-b45f5d35deef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lw5xr" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.612996 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.613272 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/34cebeb0-e452-40a4-9c67-43353a244e15-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-z2lfm\" (UID: \"34cebeb0-e452-40a4-9c67-43353a244e15\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z2lfm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.613449 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/a5a7bb2d-e3f4-4c2e-9d78-483724280890-serving-cert\") pod \"route-controller-manager-6576b87f9c-dtsxp\" (UID: \"a5a7bb2d-e3f4-4c2e-9d78-483724280890\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.613711 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19c551e7-757b-4136-a143-6b6aa8152c57-serving-cert\") pod \"controller-manager-879f6c89f-hbstw\" (UID: \"19c551e7-757b-4136-a143-6b6aa8152c57\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.613941 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/ed905c2a-053e-4094-bb3b-91412f1ec0f8-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-55cqf\" (UID: \"ed905c2a-053e-4094-bb3b-91412f1ec0f8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55cqf" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.614185 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-console-serving-cert\") pod \"console-f9d7485db-k28dm\" (UID: \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\") " pod="openshift-console/console-f9d7485db-k28dm" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.617082 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-kzhtk"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.618663 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/7fa6a508-d3a9-44de-a87a-4a5c474ad589-machine-approver-tls\") pod \"machine-approver-56656f9798-b8pqc\" (UID: \"7fa6a508-d3a9-44de-a87a-4a5c474ad589\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b8pqc" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.620647 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.620967 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-pmzmr"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.623820 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-scmz7"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.626630 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t9nkq"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.630215 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5mh7r"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.632580 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c6e656f-7683-4fe1-9a4f-00885aa67657-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-2ghv5\" (UID: \"4c6e656f-7683-4fe1-9a4f-00885aa67657\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2ghv5" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.632698 4899 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-rmv8r"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.634619 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-kb4vv"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.635732 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-llzvf"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.642819 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.645100 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.645270 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l5vs7"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.647209 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wmlxz"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.651296 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-dpwsf"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.653128 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-brt2c"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.654170 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2ghv5"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.656016 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55cqf"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.657556 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-8j6kq"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.658462 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-8j6kq" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.659109 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zqzww"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.659508 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.661775 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-k9nrk"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.662805 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-k95tt"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.662915 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-k9nrk" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.664285 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-mwh8q"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.668086 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4cz5w"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.671594 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-f8jvt"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.674468 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-nrt8l"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.675858 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5j62"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.677521 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-8j6kq"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.678540 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-tkpkx"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.680071 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.680916 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-w4cgg"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.682544 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-k9nrk"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.683815 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mcjm4"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.685375 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324670-2j7rr"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.686626 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-np5qp"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.687852 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-f45zh"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.688366 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/1e4eab70-7ebf-4f07-883b-8d08e0453ec1-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-llzvf\" (UID: \"1e4eab70-7ebf-4f07-883b-8d08e0453ec1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-llzvf" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.688435 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1e4eab70-7ebf-4f07-883b-8d08e0453ec1-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-llzvf\" (UID: \"1e4eab70-7ebf-4f07-883b-8d08e0453ec1\") " 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-llzvf" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.688554 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a9c7a81-f2a5-48a4-a202-008603e07184-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-r5j62\" (UID: \"1a9c7a81-f2a5-48a4-a202-008603e07184\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5j62" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.688579 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qjjz\" (UniqueName: \"kubernetes.io/projected/1a9c7a81-f2a5-48a4-a202-008603e07184-kube-api-access-4qjjz\") pod \"kube-storage-version-migrator-operator-b67b599dd-r5j62\" (UID: \"1a9c7a81-f2a5-48a4-a202-008603e07184\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5j62" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.688601 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a9c7a81-f2a5-48a4-a202-008603e07184-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-r5j62\" (UID: \"1a9c7a81-f2a5-48a4-a202-008603e07184\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5j62" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.688626 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1e4eab70-7ebf-4f07-883b-8d08e0453ec1-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-llzvf\" (UID: \"1e4eab70-7ebf-4f07-883b-8d08e0453ec1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-llzvf" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.688689 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1bf00f2e-253d-4ce1-afeb-e559137c9488-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-zqzww\" (UID: \"1bf00f2e-253d-4ce1-afeb-e559137c9488\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zqzww" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.688735 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf00f2e-253d-4ce1-afeb-e559137c9488-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-zqzww\" (UID: \"1bf00f2e-253d-4ce1-afeb-e559137c9488\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zqzww" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.688759 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prlbd\" (UniqueName: \"kubernetes.io/projected/1e4eab70-7ebf-4f07-883b-8d08e0453ec1-kube-api-access-prlbd\") pod \"cluster-image-registry-operator-dc59b4c8b-llzvf\" (UID: \"1e4eab70-7ebf-4f07-883b-8d08e0453ec1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-llzvf" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.688804 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf00f2e-253d-4ce1-afeb-e559137c9488-config\") pod 
\"kube-apiserver-operator-766d6c64bb-zqzww\" (UID: \"1bf00f2e-253d-4ce1-afeb-e559137c9488\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zqzww" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.688967 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-f45zh" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.689506 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-f45zh"] Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.690314 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1e4eab70-7ebf-4f07-883b-8d08e0453ec1-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-llzvf\" (UID: \"1e4eab70-7ebf-4f07-883b-8d08e0453ec1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-llzvf" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.691926 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/1e4eab70-7ebf-4f07-883b-8d08e0453ec1-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-llzvf\" (UID: \"1e4eab70-7ebf-4f07-883b-8d08e0453ec1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-llzvf" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.700091 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.720106 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.731614 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf00f2e-253d-4ce1-afeb-e559137c9488-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-zqzww\" (UID: \"1bf00f2e-253d-4ce1-afeb-e559137c9488\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zqzww" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.740286 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.749442 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf00f2e-253d-4ce1-afeb-e559137c9488-config\") pod \"kube-apiserver-operator-766d6c64bb-zqzww\" (UID: \"1bf00f2e-253d-4ce1-afeb-e559137c9488\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zqzww" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.759840 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.779568 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.801039 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.820204 4899 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-image-registry"/"image-registry-tls" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.840327 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.860529 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.880612 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.900093 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.919870 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.940407 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.960308 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Oct 03 08:42:44 crc kubenswrapper[4899]: I1003 08:42:44.981603 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.000632 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.020436 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.040244 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.061327 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.080692 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.100160 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.121488 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.140740 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.160119 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.181699 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.201056 4899 reflector.go:368] 
Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.221269 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.241158 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.260707 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.281242 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.300840 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.319975 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.341639 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.361262 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.380877 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.400700 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.419721 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.440650 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.461443 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.480291 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.501853 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.519072 4899 request.go:700] Waited for 1.006624158s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager-operator/configmaps?fieldSelector=metadata.name%3Dkube-controller-manager-operator-config&limit=500&resourceVersion=0 Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.522174 4899 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.541933 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.561351 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.580348 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.600462 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.621291 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.640534 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.660911 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.680553 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Oct 03 08:42:45 crc kubenswrapper[4899]: E1003 08:42:45.689536 4899 configmap.go:193] Couldn't get configMap openshift-kube-storage-version-migrator-operator/config: failed to sync configmap cache: timed out waiting for the condition Oct 03 08:42:45 crc kubenswrapper[4899]: E1003 08:42:45.689597 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1a9c7a81-f2a5-48a4-a202-008603e07184-config podName:1a9c7a81-f2a5-48a4-a202-008603e07184 nodeName:}" failed. No retries permitted until 2025-10-03 08:42:46.189582558 +0000 UTC m=+140.297067501 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/1a9c7a81-f2a5-48a4-a202-008603e07184-config") pod "kube-storage-version-migrator-operator-b67b599dd-r5j62" (UID: "1a9c7a81-f2a5-48a4-a202-008603e07184") : failed to sync configmap cache: timed out waiting for the condition Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.693864 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a9c7a81-f2a5-48a4-a202-008603e07184-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-r5j62\" (UID: \"1a9c7a81-f2a5-48a4-a202-008603e07184\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5j62" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.701626 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.720793 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.740452 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.781123 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.820115 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.854015 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxfjk\" (UniqueName: \"kubernetes.io/projected/dd179c6a-42d1-41bb-a584-dd843ec84bb4-kube-api-access-kxfjk\") pod \"dns-operator-744455d44c-462ht\" (UID: \"dd179c6a-42d1-41bb-a584-dd843ec84bb4\") " pod="openshift-dns-operator/dns-operator-744455d44c-462ht" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.872626 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-462ht" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.874668 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnqdg\" (UniqueName: \"kubernetes.io/projected/34cebeb0-e452-40a4-9c67-43353a244e15-kube-api-access-xnqdg\") pod \"openshift-controller-manager-operator-756b6f6bc6-z2lfm\" (UID: \"34cebeb0-e452-40a4-9c67-43353a244e15\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z2lfm" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.895957 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9q4p\" (UniqueName: \"kubernetes.io/projected/a8b9468d-675b-42d9-b5e8-b45f5d35deef-kube-api-access-q9q4p\") pod \"machine-api-operator-5694c8668f-lw5xr\" (UID: \"a8b9468d-675b-42d9-b5e8-b45f5d35deef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lw5xr" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.914722 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bz5m\" (UniqueName: \"kubernetes.io/projected/4c6e656f-7683-4fe1-9a4f-00885aa67657-kube-api-access-6bz5m\") pod \"openshift-apiserver-operator-796bbdcf4f-2ghv5\" (UID: \"4c6e656f-7683-4fe1-9a4f-00885aa67657\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2ghv5" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.923290 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-lw5xr" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.934604 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zl7zj\" (UniqueName: \"kubernetes.io/projected/a5a7bb2d-e3f4-4c2e-9d78-483724280890-kube-api-access-zl7zj\") pod \"route-controller-manager-6576b87f9c-dtsxp\" (UID: \"a5a7bb2d-e3f4-4c2e-9d78-483724280890\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.943418 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.961542 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Oct 03 08:42:45 crc kubenswrapper[4899]: I1003 08:42:45.984917 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.000753 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.037644 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztswx\" (UniqueName: \"kubernetes.io/projected/d7f325c2-54b2-42b3-bcab-466aa71dc831-kube-api-access-ztswx\") pod \"apiserver-76f77b778f-v5crb\" (UID: \"d7f325c2-54b2-42b3-bcab-466aa71dc831\") " pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.040616 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.060112 4899 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-config-operator"/"machine-config-server-tls" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.079047 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.079100 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.080037 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.088140 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-lw5xr"] Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.100352 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.120478 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.126346 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-lw5xr" event={"ID":"a8b9468d-675b-42d9-b5e8-b45f5d35deef","Type":"ContainerStarted","Data":"f8a63c4c7105d8937820aa47a0e80fe34f388513cecec9801dbd71b61cbaadea"} Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.157061 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z2lfm" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.161215 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzj8t\" (UniqueName: \"kubernetes.io/projected/e8f9f430-36b0-475f-830b-cf5e0c84ed59-kube-api-access-fzj8t\") pod \"console-operator-58897d9998-kzhtk\" (UID: \"e8f9f430-36b0-475f-830b-cf5e0c84ed59\") " pod="openshift-console-operator/console-operator-58897d9998-kzhtk" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.163315 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2ghv5" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.176746 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8swk\" (UniqueName: \"kubernetes.io/projected/7fa6a508-d3a9-44de-a87a-4a5c474ad589-kube-api-access-x8swk\") pod \"machine-approver-56656f9798-b8pqc\" (UID: \"7fa6a508-d3a9-44de-a87a-4a5c474ad589\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b8pqc" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.183418 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b8pqc" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.186305 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Oct 03 08:42:46 crc kubenswrapper[4899]: W1003 08:42:46.196333 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7fa6a508_d3a9_44de_a87a_4a5c474ad589.slice/crio-a0af2bb61ee48798d01978be77645554e26c933ce70a871957ecb40b92ea9098 WatchSource:0}: Error finding container a0af2bb61ee48798d01978be77645554e26c933ce70a871957ecb40b92ea9098: Status 404 returned error can't find the container with id a0af2bb61ee48798d01978be77645554e26c933ce70a871957ecb40b92ea9098 Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.200871 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.205597 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-kzhtk" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.206121 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a9c7a81-f2a5-48a4-a202-008603e07184-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-r5j62\" (UID: \"1a9c7a81-f2a5-48a4-a202-008603e07184\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5j62" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.206801 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a9c7a81-f2a5-48a4-a202-008603e07184-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-r5j62\" (UID: \"1a9c7a81-f2a5-48a4-a202-008603e07184\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5j62" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.221333 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.232115 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-v5crb"] Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.241974 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.244483 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-462ht"] Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.269525 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp"] Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.277635 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rsg4r\" (UniqueName: \"kubernetes.io/projected/1c74fca8-3a6a-4253-9284-dd6417e8420a-kube-api-access-rsg4r\") pod \"authentication-operator-69f744f599-xg5bn\" (UID: \"1c74fca8-3a6a-4253-9284-dd6417e8420a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xg5bn" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 
08:42:46.299721 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6nbs\" (UniqueName: \"kubernetes.io/projected/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-kube-api-access-s6nbs\") pod \"console-f9d7485db-k28dm\" (UID: \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\") " pod="openshift-console/console-f9d7485db-k28dm" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.316734 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lw5j8\" (UniqueName: \"kubernetes.io/projected/ed905c2a-053e-4094-bb3b-91412f1ec0f8-kube-api-access-lw5j8\") pod \"cluster-samples-operator-665b6dd947-55cqf\" (UID: \"ed905c2a-053e-4094-bb3b-91412f1ec0f8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55cqf" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.322595 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.340088 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.353800 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z2lfm"] Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.375117 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2ghv5"] Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.375372 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6t5k\" (UniqueName: \"kubernetes.io/projected/19c551e7-757b-4136-a143-6b6aa8152c57-kube-api-access-c6t5k\") pod \"controller-manager-879f6c89f-hbstw\" (UID: \"19c551e7-757b-4136-a143-6b6aa8152c57\") " pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.380679 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.382869 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" Oct 03 08:42:46 crc kubenswrapper[4899]: W1003 08:42:46.384997 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod34cebeb0_e452_40a4_9c67_43353a244e15.slice/crio-95efba82a7c1360d85810cf2ab41638f7edc9ec9149616b223e2a1919740a5d8 WatchSource:0}: Error finding container 95efba82a7c1360d85810cf2ab41638f7edc9ec9149616b223e2a1919740a5d8: Status 404 returned error can't find the container with id 95efba82a7c1360d85810cf2ab41638f7edc9ec9149616b223e2a1919740a5d8 Oct 03 08:42:46 crc kubenswrapper[4899]: W1003 08:42:46.389377 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4c6e656f_7683_4fe1_9a4f_00885aa67657.slice/crio-61d4659dc1b247caed8808aeb27ec61d9f5b100cc5892af42e98ff00a8361dc5 WatchSource:0}: Error finding container 61d4659dc1b247caed8808aeb27ec61d9f5b100cc5892af42e98ff00a8361dc5: Status 404 returned error can't find the container with id 61d4659dc1b247caed8808aeb27ec61d9f5b100cc5892af42e98ff00a8361dc5 Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.399946 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.419855 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.420001 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-kzhtk"] Oct 03 08:42:46 crc kubenswrapper[4899]: W1003 08:42:46.432061 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode8f9f430_36b0_475f_830b_cf5e0c84ed59.slice/crio-46c9ad9d751132132a61742c42dba73de8baa306cb94785d2a103db4aa5f85b7 WatchSource:0}: Error finding container 46c9ad9d751132132a61742c42dba73de8baa306cb94785d2a103db4aa5f85b7: Status 404 returned error can't find the container with id 46c9ad9d751132132a61742c42dba73de8baa306cb94785d2a103db4aa5f85b7 Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.440043 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.444007 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-xg5bn" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.460871 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.483366 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.489399 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55cqf" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.500492 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.521294 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.538688 4899 request.go:700] Waited for 1.879951858s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-dns/configmaps?fieldSelector=metadata.name%3Ddns-default&limit=500&resourceVersion=0 Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.540575 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.562955 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-k28dm" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.564903 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.582528 4899 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.600182 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.613149 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-hbstw"] Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.642697 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1e4eab70-7ebf-4f07-883b-8d08e0453ec1-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-llzvf\" (UID: \"1e4eab70-7ebf-4f07-883b-8d08e0453ec1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-llzvf" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.657869 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qjjz\" (UniqueName: \"kubernetes.io/projected/1a9c7a81-f2a5-48a4-a202-008603e07184-kube-api-access-4qjjz\") pod \"kube-storage-version-migrator-operator-b67b599dd-r5j62\" (UID: \"1a9c7a81-f2a5-48a4-a202-008603e07184\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5j62" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.677390 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1bf00f2e-253d-4ce1-afeb-e559137c9488-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-zqzww\" (UID: \"1bf00f2e-253d-4ce1-afeb-e559137c9488\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zqzww" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.696447 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-xg5bn"] Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.698152 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-prlbd\" (UniqueName: \"kubernetes.io/projected/1e4eab70-7ebf-4f07-883b-8d08e0453ec1-kube-api-access-prlbd\") pod \"cluster-image-registry-operator-dc59b4c8b-llzvf\" (UID: \"1e4eab70-7ebf-4f07-883b-8d08e0453ec1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-llzvf" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.701725 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.717721 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55cqf"] Oct 03 08:42:46 crc kubenswrapper[4899]: W1003 08:42:46.719927 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1c74fca8_3a6a_4253_9284_dd6417e8420a.slice/crio-f410f204b7f764dc30d7e875d3e833b9e50b8bc82183a6a46d382a2778cd2320 WatchSource:0}: Error finding container f410f204b7f764dc30d7e875d3e833b9e50b8bc82183a6a46d382a2778cd2320: Status 404 returned error can't find the container with id f410f204b7f764dc30d7e875d3e833b9e50b8bc82183a6a46d382a2778cd2320 Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.720025 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.741545 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.760860 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.791054 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-k28dm"] Oct 03 08:42:46 crc kubenswrapper[4899]: W1003 08:42:46.804190 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod633aedb3_7eca_4c2c_b6c3_69a0f7c4787d.slice/crio-d0034f42474a439149dd168a9351c037b7be101be0c76e77bc2fecb10fbadb99 WatchSource:0}: Error finding container d0034f42474a439149dd168a9351c037b7be101be0c76e77bc2fecb10fbadb99: Status 404 returned error can't find the container with id d0034f42474a439149dd168a9351c037b7be101be0c76e77bc2fecb10fbadb99 Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.810915 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-llzvf" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.815280 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pj7td\" (UniqueName: \"kubernetes.io/projected/9c011ebc-362b-4893-9d7e-6a11a1d3e902-kube-api-access-pj7td\") pod \"router-default-5444994796-tfknb\" (UID: \"9c011ebc-362b-4893-9d7e-6a11a1d3e902\") " pod="openshift-ingress/router-default-5444994796-tfknb" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.815327 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.815368 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4cc4b2c6-3ef1-41a5-8f47-16f20c6b2349-config\") pod \"kube-controller-manager-operator-78b949d7b-t9nkq\" (UID: \"4cc4b2c6-3ef1-41a5-8f47-16f20c6b2349\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t9nkq" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.815394 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.815421 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/aee31d1e-9049-4b14-9544-e26bc3ea2b38-trusted-ca\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.815445 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9c011ebc-362b-4893-9d7e-6a11a1d3e902-metrics-certs\") pod \"router-default-5444994796-tfknb\" (UID: \"9c011ebc-362b-4893-9d7e-6a11a1d3e902\") " pod="openshift-ingress/router-default-5444994796-tfknb" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.815469 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/9c011ebc-362b-4893-9d7e-6a11a1d3e902-default-certificate\") pod \"router-default-5444994796-tfknb\" (UID: \"9c011ebc-362b-4893-9d7e-6a11a1d3e902\") " pod="openshift-ingress/router-default-5444994796-tfknb" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.815494 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/9d271c75-e1f6-4cf1-8be9-2ef6e27066d6-etcd-client\") pod \"etcd-operator-b45778765-pmzmr\" (UID: 
\"9d271c75-e1f6-4cf1-8be9-2ef6e27066d6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pmzmr" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.815524 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbfdm\" (UniqueName: \"kubernetes.io/projected/ae242b31-ad12-4328-8818-313458ed46aa-kube-api-access-gbfdm\") pod \"control-plane-machine-set-operator-78cbb6b69f-f8jvt\" (UID: \"ae242b31-ad12-4328-8818-313458ed46aa\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-f8jvt" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.815573 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.815598 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/03938235-9990-4806-a4c7-21d1c97cada5-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-brt2c\" (UID: \"03938235-9990-4806-a4c7-21d1c97cada5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-brt2c" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.815674 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/95e7f40a-6f2f-4689-829f-6d14e941ad9e-etcd-client\") pod \"apiserver-7bbb656c7d-5hl5h\" (UID: \"95e7f40a-6f2f-4689-829f-6d14e941ad9e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.815721 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.815768 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h69x5\" (UniqueName: \"kubernetes.io/projected/95e7f40a-6f2f-4689-829f-6d14e941ad9e-kube-api-access-h69x5\") pod \"apiserver-7bbb656c7d-5hl5h\" (UID: \"95e7f40a-6f2f-4689-829f-6d14e941ad9e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.815803 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5h82\" (UniqueName: \"kubernetes.io/projected/f8fd1d26-6eac-46bf-bcba-06593430d823-kube-api-access-l5h82\") pod \"service-ca-operator-777779d784-mwh8q\" (UID: \"f8fd1d26-6eac-46bf-bcba-06593430d823\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mwh8q" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.815832 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/95e7f40a-6f2f-4689-829f-6d14e941ad9e-audit-policies\") pod \"apiserver-7bbb656c7d-5hl5h\" (UID: \"95e7f40a-6f2f-4689-829f-6d14e941ad9e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.815848 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c459aabe-55b9-415c-8782-8a112e9ea466-audit-policies\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.815865 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4cc4b2c6-3ef1-41a5-8f47-16f20c6b2349-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-t9nkq\" (UID: \"4cc4b2c6-3ef1-41a5-8f47-16f20c6b2349\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t9nkq" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.815880 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47htd\" (UniqueName: \"kubernetes.io/projected/2206e9a3-b591-4d0a-aa09-9dc5b2e54342-kube-api-access-47htd\") pod \"downloads-7954f5f757-dpwsf\" (UID: \"2206e9a3-b591-4d0a-aa09-9dc5b2e54342\") " pod="openshift-console/downloads-7954f5f757-dpwsf" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.815920 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d271c75-e1f6-4cf1-8be9-2ef6e27066d6-serving-cert\") pod \"etcd-operator-b45778765-pmzmr\" (UID: \"9d271c75-e1f6-4cf1-8be9-2ef6e27066d6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pmzmr" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.815954 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/95e7f40a-6f2f-4689-829f-6d14e941ad9e-audit-dir\") pod \"apiserver-7bbb656c7d-5hl5h\" (UID: \"95e7f40a-6f2f-4689-829f-6d14e941ad9e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.816441 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/95e7f40a-6f2f-4689-829f-6d14e941ad9e-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-5hl5h\" (UID: \"95e7f40a-6f2f-4689-829f-6d14e941ad9e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.816471 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/524a0dd7-fc7e-41a5-9304-432c7b6e0624-serving-cert\") pod \"openshift-config-operator-7777fb866f-kgddg\" (UID: \"524a0dd7-fc7e-41a5-9304-432c7b6e0624\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-kgddg" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.816803 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/aee31d1e-9049-4b14-9544-e26bc3ea2b38-ca-trust-extracted\") pod 
\"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.816844 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-226rl\" (UniqueName: \"kubernetes.io/projected/d6c10a45-5a0d-4f5b-931b-0eb351ddd8b2-kube-api-access-226rl\") pod \"ingress-operator-5b745b69d9-scmz7\" (UID: \"d6c10a45-5a0d-4f5b-931b-0eb351ddd8b2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-scmz7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.816919 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d271c75-e1f6-4cf1-8be9-2ef6e27066d6-config\") pod \"etcd-operator-b45778765-pmzmr\" (UID: \"9d271c75-e1f6-4cf1-8be9-2ef6e27066d6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pmzmr" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.816971 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.817060 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/9d271c75-e1f6-4cf1-8be9-2ef6e27066d6-etcd-ca\") pod \"etcd-operator-b45778765-pmzmr\" (UID: \"9d271c75-e1f6-4cf1-8be9-2ef6e27066d6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pmzmr" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.817521 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.817664 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03938235-9990-4806-a4c7-21d1c97cada5-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-brt2c\" (UID: \"03938235-9990-4806-a4c7-21d1c97cada5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-brt2c" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.817688 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f8fd1d26-6eac-46bf-bcba-06593430d823-serving-cert\") pod \"service-ca-operator-777779d784-mwh8q\" (UID: \"f8fd1d26-6eac-46bf-bcba-06593430d823\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mwh8q" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.817716 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.817758 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5dcj6\" (UniqueName: \"kubernetes.io/projected/9d271c75-e1f6-4cf1-8be9-2ef6e27066d6-kube-api-access-5dcj6\") pod \"etcd-operator-b45778765-pmzmr\" (UID: \"9d271c75-e1f6-4cf1-8be9-2ef6e27066d6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pmzmr" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.817777 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4cc4b2c6-3ef1-41a5-8f47-16f20c6b2349-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-t9nkq\" (UID: \"4cc4b2c6-3ef1-41a5-8f47-16f20c6b2349\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t9nkq" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.817835 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h64pb\" (UniqueName: \"kubernetes.io/projected/aee31d1e-9049-4b14-9544-e26bc3ea2b38-kube-api-access-h64pb\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.817853 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.817870 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d2af99a7-4436-4e42-8418-03c1d8c503ad-srv-cert\") pod \"olm-operator-6b444d44fb-4cz5w\" (UID: \"d2af99a7-4436-4e42-8418-03c1d8c503ad\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4cz5w" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.817931 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/524a0dd7-fc7e-41a5-9304-432c7b6e0624-available-featuregates\") pod \"openshift-config-operator-7777fb866f-kgddg\" (UID: \"524a0dd7-fc7e-41a5-9304-432c7b6e0624\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-kgddg" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.817953 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/95e7f40a-6f2f-4689-829f-6d14e941ad9e-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-5hl5h\" (UID: \"95e7f40a-6f2f-4689-829f-6d14e941ad9e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.817971 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"encryption-config\" (UniqueName: \"kubernetes.io/secret/95e7f40a-6f2f-4689-829f-6d14e941ad9e-encryption-config\") pod \"apiserver-7bbb656c7d-5hl5h\" (UID: \"95e7f40a-6f2f-4689-829f-6d14e941ad9e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.818031 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d6c10a45-5a0d-4f5b-931b-0eb351ddd8b2-trusted-ca\") pod \"ingress-operator-5b745b69d9-scmz7\" (UID: \"d6c10a45-5a0d-4f5b-931b-0eb351ddd8b2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-scmz7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.818050 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkzvr\" (UniqueName: \"kubernetes.io/projected/524a0dd7-fc7e-41a5-9304-432c7b6e0624-kube-api-access-mkzvr\") pod \"openshift-config-operator-7777fb866f-kgddg\" (UID: \"524a0dd7-fc7e-41a5-9304-432c7b6e0624\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-kgddg" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.818067 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d2af99a7-4436-4e42-8418-03c1d8c503ad-profile-collector-cert\") pod \"olm-operator-6b444d44fb-4cz5w\" (UID: \"d2af99a7-4436-4e42-8418-03c1d8c503ad\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4cz5w" Oct 03 08:42:46 crc kubenswrapper[4899]: E1003 08:42:46.818078 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:47.318065023 +0000 UTC m=+141.425549976 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.818101 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jm89f\" (UniqueName: \"kubernetes.io/projected/a2bfb93c-32c2-4dbe-b5a9-44f616a3e299-kube-api-access-jm89f\") pod \"machine-config-controller-84d6567774-kb4vv\" (UID: \"a2bfb93c-32c2-4dbe-b5a9-44f616a3e299\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kb4vv" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.818120 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/badd5177-888b-409f-abe1-ffb6902a436b-srv-cert\") pod \"catalog-operator-68c6474976-wmlxz\" (UID: \"badd5177-888b-409f-abe1-ffb6902a436b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wmlxz" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.818135 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67mdt\" (UniqueName: \"kubernetes.io/projected/d2af99a7-4436-4e42-8418-03c1d8c503ad-kube-api-access-67mdt\") pod \"olm-operator-6b444d44fb-4cz5w\" (UID: \"d2af99a7-4436-4e42-8418-03c1d8c503ad\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4cz5w" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.818155 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a2bfb93c-32c2-4dbe-b5a9-44f616a3e299-proxy-tls\") pod \"machine-config-controller-84d6567774-kb4vv\" (UID: \"a2bfb93c-32c2-4dbe-b5a9-44f616a3e299\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kb4vv" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.818181 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03938235-9990-4806-a4c7-21d1c97cada5-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-brt2c\" (UID: \"03938235-9990-4806-a4c7-21d1c97cada5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-brt2c" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.818200 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/aee31d1e-9049-4b14-9544-e26bc3ea2b38-registry-certificates\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.818217 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d6c10a45-5a0d-4f5b-931b-0eb351ddd8b2-metrics-tls\") pod \"ingress-operator-5b745b69d9-scmz7\" (UID: \"d6c10a45-5a0d-4f5b-931b-0eb351ddd8b2\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-scmz7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.818234 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/aee31d1e-9049-4b14-9544-e26bc3ea2b38-bound-sa-token\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.818250 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/95e7f40a-6f2f-4689-829f-6d14e941ad9e-serving-cert\") pod \"apiserver-7bbb656c7d-5hl5h\" (UID: \"95e7f40a-6f2f-4689-829f-6d14e941ad9e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.818455 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6a4c32f3-acb2-45e6-99d3-4ff78be9b3b0-apiservice-cert\") pod \"packageserver-d55dfcdfc-5mh7r\" (UID: \"6a4c32f3-acb2-45e6-99d3-4ff78be9b3b0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5mh7r" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.818493 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/ae242b31-ad12-4328-8818-313458ed46aa-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-f8jvt\" (UID: \"ae242b31-ad12-4328-8818-313458ed46aa\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-f8jvt" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.818512 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c459aabe-55b9-415c-8782-8a112e9ea466-audit-dir\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.818527 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.818664 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/aee31d1e-9049-4b14-9544-e26bc3ea2b38-registry-tls\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.818854 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a2bfb93c-32c2-4dbe-b5a9-44f616a3e299-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-kb4vv\" (UID: 
\"a2bfb93c-32c2-4dbe-b5a9-44f616a3e299\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kb4vv" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.818941 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6a4c32f3-acb2-45e6-99d3-4ff78be9b3b0-webhook-cert\") pod \"packageserver-d55dfcdfc-5mh7r\" (UID: \"6a4c32f3-acb2-45e6-99d3-4ff78be9b3b0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5mh7r" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.818993 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2z8pw\" (UniqueName: \"kubernetes.io/projected/c459aabe-55b9-415c-8782-8a112e9ea466-kube-api-access-2z8pw\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.819031 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9c011ebc-362b-4893-9d7e-6a11a1d3e902-service-ca-bundle\") pod \"router-default-5444994796-tfknb\" (UID: \"9c011ebc-362b-4893-9d7e-6a11a1d3e902\") " pod="openshift-ingress/router-default-5444994796-tfknb" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.819056 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/6a4c32f3-acb2-45e6-99d3-4ff78be9b3b0-tmpfs\") pod \"packageserver-d55dfcdfc-5mh7r\" (UID: \"6a4c32f3-acb2-45e6-99d3-4ff78be9b3b0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5mh7r" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.819224 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/aee31d1e-9049-4b14-9544-e26bc3ea2b38-installation-pull-secrets\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.819260 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8fd1d26-6eac-46bf-bcba-06593430d823-config\") pod \"service-ca-operator-777779d784-mwh8q\" (UID: \"f8fd1d26-6eac-46bf-bcba-06593430d823\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mwh8q" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.819289 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d6c10a45-5a0d-4f5b-931b-0eb351ddd8b2-bound-sa-token\") pod \"ingress-operator-5b745b69d9-scmz7\" (UID: \"d6c10a45-5a0d-4f5b-931b-0eb351ddd8b2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-scmz7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.819411 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.819464 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/9d271c75-e1f6-4cf1-8be9-2ef6e27066d6-etcd-service-ca\") pod \"etcd-operator-b45778765-pmzmr\" (UID: \"9d271c75-e1f6-4cf1-8be9-2ef6e27066d6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pmzmr" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.819497 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7tm2\" (UniqueName: \"kubernetes.io/projected/d183d7e2-7828-46a1-b80b-a1032e417265-kube-api-access-x7tm2\") pod \"migrator-59844c95c7-nrt8l\" (UID: \"d183d7e2-7828-46a1-b80b-a1032e417265\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-nrt8l" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.819522 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsdpj\" (UniqueName: \"kubernetes.io/projected/6a4c32f3-acb2-45e6-99d3-4ff78be9b3b0-kube-api-access-hsdpj\") pod \"packageserver-d55dfcdfc-5mh7r\" (UID: \"6a4c32f3-acb2-45e6-99d3-4ff78be9b3b0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5mh7r" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.819564 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.819623 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/badd5177-888b-409f-abe1-ffb6902a436b-profile-collector-cert\") pod \"catalog-operator-68c6474976-wmlxz\" (UID: \"badd5177-888b-409f-abe1-ffb6902a436b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wmlxz" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.819657 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcntn\" (UniqueName: \"kubernetes.io/projected/badd5177-888b-409f-abe1-ffb6902a436b-kube-api-access-tcntn\") pod \"catalog-operator-68c6474976-wmlxz\" (UID: \"badd5177-888b-409f-abe1-ffb6902a436b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wmlxz" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.819722 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/9c011ebc-362b-4893-9d7e-6a11a1d3e902-stats-auth\") pod \"router-default-5444994796-tfknb\" (UID: \"9c011ebc-362b-4893-9d7e-6a11a1d3e902\") " pod="openshift-ingress/router-default-5444994796-tfknb" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.819809 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: 
\"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.859080 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zqzww" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.920311 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:46 crc kubenswrapper[4899]: E1003 08:42:46.920560 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:47.420531568 +0000 UTC m=+141.528016521 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.920740 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7tm2\" (UniqueName: \"kubernetes.io/projected/d183d7e2-7828-46a1-b80b-a1032e417265-kube-api-access-x7tm2\") pod \"migrator-59844c95c7-nrt8l\" (UID: \"d183d7e2-7828-46a1-b80b-a1032e417265\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-nrt8l" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.920767 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsdpj\" (UniqueName: \"kubernetes.io/projected/6a4c32f3-acb2-45e6-99d3-4ff78be9b3b0-kube-api-access-hsdpj\") pod \"packageserver-d55dfcdfc-5mh7r\" (UID: \"6a4c32f3-acb2-45e6-99d3-4ff78be9b3b0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5mh7r" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.920798 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/9d271c75-e1f6-4cf1-8be9-2ef6e27066d6-etcd-service-ca\") pod \"etcd-operator-b45778765-pmzmr\" (UID: \"9d271c75-e1f6-4cf1-8be9-2ef6e27066d6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pmzmr" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.920831 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.920875 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/badd5177-888b-409f-abe1-ffb6902a436b-profile-collector-cert\") pod 
\"catalog-operator-68c6474976-wmlxz\" (UID: \"badd5177-888b-409f-abe1-ffb6902a436b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wmlxz" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.920910 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcntn\" (UniqueName: \"kubernetes.io/projected/badd5177-888b-409f-abe1-ffb6902a436b-kube-api-access-tcntn\") pod \"catalog-operator-68c6474976-wmlxz\" (UID: \"badd5177-888b-409f-abe1-ffb6902a436b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wmlxz" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.920932 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/9c011ebc-362b-4893-9d7e-6a11a1d3e902-stats-auth\") pod \"router-default-5444994796-tfknb\" (UID: \"9c011ebc-362b-4893-9d7e-6a11a1d3e902\") " pod="openshift-ingress/router-default-5444994796-tfknb" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.920950 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.920983 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pj7td\" (UniqueName: \"kubernetes.io/projected/9c011ebc-362b-4893-9d7e-6a11a1d3e902-kube-api-access-pj7td\") pod \"router-default-5444994796-tfknb\" (UID: \"9c011ebc-362b-4893-9d7e-6a11a1d3e902\") " pod="openshift-ingress/router-default-5444994796-tfknb" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921002 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921026 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4cc4b2c6-3ef1-41a5-8f47-16f20c6b2349-config\") pod \"kube-controller-manager-operator-78b949d7b-t9nkq\" (UID: \"4cc4b2c6-3ef1-41a5-8f47-16f20c6b2349\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t9nkq" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921048 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921071 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7a94e3ca-3b81-4dbf-a1db-73e6b752427c-proxy-tls\") pod \"machine-config-operator-74547568cd-k95tt\" (UID: 
\"7a94e3ca-3b81-4dbf-a1db-73e6b752427c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k95tt" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921115 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/aee31d1e-9049-4b14-9544-e26bc3ea2b38-trusted-ca\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921135 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9c011ebc-362b-4893-9d7e-6a11a1d3e902-metrics-certs\") pod \"router-default-5444994796-tfknb\" (UID: \"9c011ebc-362b-4893-9d7e-6a11a1d3e902\") " pod="openshift-ingress/router-default-5444994796-tfknb" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921157 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5cs8\" (UniqueName: \"kubernetes.io/projected/264bb2e1-d946-4b30-9aa8-48cb9a9447e5-kube-api-access-d5cs8\") pod \"collect-profiles-29324670-2j7rr\" (UID: \"264bb2e1-d946-4b30-9aa8-48cb9a9447e5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324670-2j7rr" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921198 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/9c011ebc-362b-4893-9d7e-6a11a1d3e902-default-certificate\") pod \"router-default-5444994796-tfknb\" (UID: \"9c011ebc-362b-4893-9d7e-6a11a1d3e902\") " pod="openshift-ingress/router-default-5444994796-tfknb" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921219 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/9d271c75-e1f6-4cf1-8be9-2ef6e27066d6-etcd-client\") pod \"etcd-operator-b45778765-pmzmr\" (UID: \"9d271c75-e1f6-4cf1-8be9-2ef6e27066d6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pmzmr" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921241 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbfdm\" (UniqueName: \"kubernetes.io/projected/ae242b31-ad12-4328-8818-313458ed46aa-kube-api-access-gbfdm\") pod \"control-plane-machine-set-operator-78cbb6b69f-f8jvt\" (UID: \"ae242b31-ad12-4328-8818-313458ed46aa\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-f8jvt" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921262 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/331490c6-4b8d-44cd-9081-5d9a7db1942a-socket-dir\") pod \"csi-hostpathplugin-k9nrk\" (UID: \"331490c6-4b8d-44cd-9081-5d9a7db1942a\") " pod="hostpath-provisioner/csi-hostpathplugin-k9nrk" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921285 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921306 
4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/03938235-9990-4806-a4c7-21d1c97cada5-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-brt2c\" (UID: \"03938235-9990-4806-a4c7-21d1c97cada5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-brt2c" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921330 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/06b939f2-8591-4799-b59e-34cd8677aca6-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-w4cgg\" (UID: \"06b939f2-8591-4799-b59e-34cd8677aca6\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-w4cgg" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921362 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/95e7f40a-6f2f-4689-829f-6d14e941ad9e-etcd-client\") pod \"apiserver-7bbb656c7d-5hl5h\" (UID: \"95e7f40a-6f2f-4689-829f-6d14e941ad9e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921384 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921407 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h69x5\" (UniqueName: \"kubernetes.io/projected/95e7f40a-6f2f-4689-829f-6d14e941ad9e-kube-api-access-h69x5\") pod \"apiserver-7bbb656c7d-5hl5h\" (UID: \"95e7f40a-6f2f-4689-829f-6d14e941ad9e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921442 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vb65\" (UniqueName: \"kubernetes.io/projected/ad88b591-8c16-4151-92e0-5e2fd6d352f2-kube-api-access-6vb65\") pod \"package-server-manager-789f6589d5-mcjm4\" (UID: \"ad88b591-8c16-4151-92e0-5e2fd6d352f2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mcjm4" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921474 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5h82\" (UniqueName: \"kubernetes.io/projected/f8fd1d26-6eac-46bf-bcba-06593430d823-kube-api-access-l5h82\") pod \"service-ca-operator-777779d784-mwh8q\" (UID: \"f8fd1d26-6eac-46bf-bcba-06593430d823\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mwh8q" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921496 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/95e7f40a-6f2f-4689-829f-6d14e941ad9e-audit-policies\") pod \"apiserver-7bbb656c7d-5hl5h\" (UID: \"95e7f40a-6f2f-4689-829f-6d14e941ad9e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921520 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c459aabe-55b9-415c-8782-8a112e9ea466-audit-policies\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921542 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/79762752-6337-410e-8bc7-c0a47aa5d773-cert\") pod \"ingress-canary-f45zh\" (UID: \"79762752-6337-410e-8bc7-c0a47aa5d773\") " pod="openshift-ingress-canary/ingress-canary-f45zh" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921561 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/1f907faf-6eea-49c9-b601-b108e328d9d6-certs\") pod \"machine-config-server-csm2k\" (UID: \"1f907faf-6eea-49c9-b601-b108e328d9d6\") " pod="openshift-machine-config-operator/machine-config-server-csm2k" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921581 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/eca40a31-8b9d-4825-82e2-253991771725-signing-cabundle\") pod \"service-ca-9c57cc56f-rmv8r\" (UID: \"eca40a31-8b9d-4825-82e2-253991771725\") " pod="openshift-service-ca/service-ca-9c57cc56f-rmv8r" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921604 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4cc4b2c6-3ef1-41a5-8f47-16f20c6b2349-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-t9nkq\" (UID: \"4cc4b2c6-3ef1-41a5-8f47-16f20c6b2349\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t9nkq" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921624 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47htd\" (UniqueName: \"kubernetes.io/projected/2206e9a3-b591-4d0a-aa09-9dc5b2e54342-kube-api-access-47htd\") pod \"downloads-7954f5f757-dpwsf\" (UID: \"2206e9a3-b591-4d0a-aa09-9dc5b2e54342\") " pod="openshift-console/downloads-7954f5f757-dpwsf" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921678 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsxj6\" (UniqueName: \"kubernetes.io/projected/a4456e30-3eb3-4e1a-b22d-8888babf06a9-kube-api-access-lsxj6\") pod \"marketplace-operator-79b997595-np5qp\" (UID: \"a4456e30-3eb3-4e1a-b22d-8888babf06a9\") " pod="openshift-marketplace/marketplace-operator-79b997595-np5qp" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921711 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d271c75-e1f6-4cf1-8be9-2ef6e27066d6-serving-cert\") pod \"etcd-operator-b45778765-pmzmr\" (UID: \"9d271c75-e1f6-4cf1-8be9-2ef6e27066d6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pmzmr" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921732 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/95e7f40a-6f2f-4689-829f-6d14e941ad9e-audit-dir\") pod \"apiserver-7bbb656c7d-5hl5h\" (UID: \"95e7f40a-6f2f-4689-829f-6d14e941ad9e\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921776 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/95e7f40a-6f2f-4689-829f-6d14e941ad9e-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-5hl5h\" (UID: \"95e7f40a-6f2f-4689-829f-6d14e941ad9e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921801 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/524a0dd7-fc7e-41a5-9304-432c7b6e0624-serving-cert\") pod \"openshift-config-operator-7777fb866f-kgddg\" (UID: \"524a0dd7-fc7e-41a5-9304-432c7b6e0624\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-kgddg" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921855 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/eca40a31-8b9d-4825-82e2-253991771725-signing-key\") pod \"service-ca-9c57cc56f-rmv8r\" (UID: \"eca40a31-8b9d-4825-82e2-253991771725\") " pod="openshift-service-ca/service-ca-9c57cc56f-rmv8r" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921948 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/aee31d1e-9049-4b14-9544-e26bc3ea2b38-ca-trust-extracted\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921971 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-226rl\" (UniqueName: \"kubernetes.io/projected/d6c10a45-5a0d-4f5b-931b-0eb351ddd8b2-kube-api-access-226rl\") pod \"ingress-operator-5b745b69d9-scmz7\" (UID: \"d6c10a45-5a0d-4f5b-931b-0eb351ddd8b2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-scmz7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.921996 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/1f907faf-6eea-49c9-b601-b108e328d9d6-node-bootstrap-token\") pod \"machine-config-server-csm2k\" (UID: \"1f907faf-6eea-49c9-b601-b108e328d9d6\") " pod="openshift-machine-config-operator/machine-config-server-csm2k" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.922024 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtn5v\" (UniqueName: \"kubernetes.io/projected/06b939f2-8591-4799-b59e-34cd8677aca6-kube-api-access-qtn5v\") pod \"multus-admission-controller-857f4d67dd-w4cgg\" (UID: \"06b939f2-8591-4799-b59e-34cd8677aca6\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-w4cgg" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.922053 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d271c75-e1f6-4cf1-8be9-2ef6e27066d6-config\") pod \"etcd-operator-b45778765-pmzmr\" (UID: \"9d271c75-e1f6-4cf1-8be9-2ef6e27066d6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pmzmr" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.922122 4899 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.922196 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/9d271c75-e1f6-4cf1-8be9-2ef6e27066d6-etcd-ca\") pod \"etcd-operator-b45778765-pmzmr\" (UID: \"9d271c75-e1f6-4cf1-8be9-2ef6e27066d6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pmzmr" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.922225 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5d7wt\" (UniqueName: \"kubernetes.io/projected/9f3aca40-1b42-4f22-ba89-a9b51d30aa75-kube-api-access-5d7wt\") pod \"dns-default-8j6kq\" (UID: \"9f3aca40-1b42-4f22-ba89-a9b51d30aa75\") " pod="openshift-dns/dns-default-8j6kq" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.922316 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.922369 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmr59\" (UniqueName: \"kubernetes.io/projected/7a94e3ca-3b81-4dbf-a1db-73e6b752427c-kube-api-access-nmr59\") pod \"machine-config-operator-74547568cd-k95tt\" (UID: \"7a94e3ca-3b81-4dbf-a1db-73e6b752427c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k95tt" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.922396 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/331490c6-4b8d-44cd-9081-5d9a7db1942a-plugins-dir\") pod \"csi-hostpathplugin-k9nrk\" (UID: \"331490c6-4b8d-44cd-9081-5d9a7db1942a\") " pod="hostpath-provisioner/csi-hostpathplugin-k9nrk" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.922463 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zsrwv\" (UniqueName: \"kubernetes.io/projected/79762752-6337-410e-8bc7-c0a47aa5d773-kube-api-access-zsrwv\") pod \"ingress-canary-f45zh\" (UID: \"79762752-6337-410e-8bc7-c0a47aa5d773\") " pod="openshift-ingress-canary/ingress-canary-f45zh" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.922542 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03938235-9990-4806-a4c7-21d1c97cada5-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-brt2c\" (UID: \"03938235-9990-4806-a4c7-21d1c97cada5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-brt2c" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.922570 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: 
\"kubernetes.io/configmap/7a94e3ca-3b81-4dbf-a1db-73e6b752427c-images\") pod \"machine-config-operator-74547568cd-k95tt\" (UID: \"7a94e3ca-3b81-4dbf-a1db-73e6b752427c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k95tt" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.922884 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f8fd1d26-6eac-46bf-bcba-06593430d823-serving-cert\") pod \"service-ca-operator-777779d784-mwh8q\" (UID: \"f8fd1d26-6eac-46bf-bcba-06593430d823\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mwh8q" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.922959 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9r79f\" (UniqueName: \"kubernetes.io/projected/1f907faf-6eea-49c9-b601-b108e328d9d6-kube-api-access-9r79f\") pod \"machine-config-server-csm2k\" (UID: \"1f907faf-6eea-49c9-b601-b108e328d9d6\") " pod="openshift-machine-config-operator/machine-config-server-csm2k" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.923020 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.923098 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5dcj6\" (UniqueName: \"kubernetes.io/projected/9d271c75-e1f6-4cf1-8be9-2ef6e27066d6-kube-api-access-5dcj6\") pod \"etcd-operator-b45778765-pmzmr\" (UID: \"9d271c75-e1f6-4cf1-8be9-2ef6e27066d6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pmzmr" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.923131 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4cc4b2c6-3ef1-41a5-8f47-16f20c6b2349-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-t9nkq\" (UID: \"4cc4b2c6-3ef1-41a5-8f47-16f20c6b2349\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t9nkq" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.923299 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/9d271c75-e1f6-4cf1-8be9-2ef6e27066d6-etcd-ca\") pod \"etcd-operator-b45778765-pmzmr\" (UID: \"9d271c75-e1f6-4cf1-8be9-2ef6e27066d6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pmzmr" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.923418 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9f3aca40-1b42-4f22-ba89-a9b51d30aa75-config-volume\") pod \"dns-default-8j6kq\" (UID: \"9f3aca40-1b42-4f22-ba89-a9b51d30aa75\") " pod="openshift-dns/dns-default-8j6kq" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.923507 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/264bb2e1-d946-4b30-9aa8-48cb9a9447e5-secret-volume\") pod \"collect-profiles-29324670-2j7rr\" (UID: 
\"264bb2e1-d946-4b30-9aa8-48cb9a9447e5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324670-2j7rr" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.923565 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wlrw\" (UniqueName: \"kubernetes.io/projected/331490c6-4b8d-44cd-9081-5d9a7db1942a-kube-api-access-8wlrw\") pod \"csi-hostpathplugin-k9nrk\" (UID: \"331490c6-4b8d-44cd-9081-5d9a7db1942a\") " pod="hostpath-provisioner/csi-hostpathplugin-k9nrk" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.923591 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h64pb\" (UniqueName: \"kubernetes.io/projected/aee31d1e-9049-4b14-9544-e26bc3ea2b38-kube-api-access-h64pb\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.923659 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.923684 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/331490c6-4b8d-44cd-9081-5d9a7db1942a-csi-data-dir\") pod \"csi-hostpathplugin-k9nrk\" (UID: \"331490c6-4b8d-44cd-9081-5d9a7db1942a\") " pod="hostpath-provisioner/csi-hostpathplugin-k9nrk" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.923745 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7a94e3ca-3b81-4dbf-a1db-73e6b752427c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-k95tt\" (UID: \"7a94e3ca-3b81-4dbf-a1db-73e6b752427c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k95tt" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.923948 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/524a0dd7-fc7e-41a5-9304-432c7b6e0624-available-featuregates\") pod \"openshift-config-operator-7777fb866f-kgddg\" (UID: \"524a0dd7-fc7e-41a5-9304-432c7b6e0624\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-kgddg" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.923981 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/95e7f40a-6f2f-4689-829f-6d14e941ad9e-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-5hl5h\" (UID: \"95e7f40a-6f2f-4689-829f-6d14e941ad9e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.924163 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/95e7f40a-6f2f-4689-829f-6d14e941ad9e-encryption-config\") pod \"apiserver-7bbb656c7d-5hl5h\" (UID: \"95e7f40a-6f2f-4689-829f-6d14e941ad9e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:46 crc 
kubenswrapper[4899]: I1003 08:42:46.924489 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d271c75-e1f6-4cf1-8be9-2ef6e27066d6-config\") pod \"etcd-operator-b45778765-pmzmr\" (UID: \"9d271c75-e1f6-4cf1-8be9-2ef6e27066d6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pmzmr" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.924796 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d6c10a45-5a0d-4f5b-931b-0eb351ddd8b2-trusted-ca\") pod \"ingress-operator-5b745b69d9-scmz7\" (UID: \"d6c10a45-5a0d-4f5b-931b-0eb351ddd8b2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-scmz7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.924831 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d2af99a7-4436-4e42-8418-03c1d8c503ad-srv-cert\") pod \"olm-operator-6b444d44fb-4cz5w\" (UID: \"d2af99a7-4436-4e42-8418-03c1d8c503ad\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4cz5w" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.925004 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d2af99a7-4436-4e42-8418-03c1d8c503ad-profile-collector-cert\") pod \"olm-operator-6b444d44fb-4cz5w\" (UID: \"d2af99a7-4436-4e42-8418-03c1d8c503ad\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4cz5w" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.925044 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/331490c6-4b8d-44cd-9081-5d9a7db1942a-registration-dir\") pod \"csi-hostpathplugin-k9nrk\" (UID: \"331490c6-4b8d-44cd-9081-5d9a7db1942a\") " pod="hostpath-provisioner/csi-hostpathplugin-k9nrk" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.925117 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkzvr\" (UniqueName: \"kubernetes.io/projected/524a0dd7-fc7e-41a5-9304-432c7b6e0624-kube-api-access-mkzvr\") pod \"openshift-config-operator-7777fb866f-kgddg\" (UID: \"524a0dd7-fc7e-41a5-9304-432c7b6e0624\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-kgddg" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.925256 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/badd5177-888b-409f-abe1-ffb6902a436b-srv-cert\") pod \"catalog-operator-68c6474976-wmlxz\" (UID: \"badd5177-888b-409f-abe1-ffb6902a436b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wmlxz" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.925723 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.923943 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: 
\"kubernetes.io/empty-dir/aee31d1e-9049-4b14-9544-e26bc3ea2b38-ca-trust-extracted\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.926217 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67mdt\" (UniqueName: \"kubernetes.io/projected/d2af99a7-4436-4e42-8418-03c1d8c503ad-kube-api-access-67mdt\") pod \"olm-operator-6b444d44fb-4cz5w\" (UID: \"d2af99a7-4436-4e42-8418-03c1d8c503ad\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4cz5w" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.922917 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/9d271c75-e1f6-4cf1-8be9-2ef6e27066d6-etcd-service-ca\") pod \"etcd-operator-b45778765-pmzmr\" (UID: \"9d271c75-e1f6-4cf1-8be9-2ef6e27066d6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pmzmr" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.926671 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/aee31d1e-9049-4b14-9544-e26bc3ea2b38-trusted-ca\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.927437 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5j62" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.927866 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.927968 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/95e7f40a-6f2f-4689-829f-6d14e941ad9e-audit-dir\") pod \"apiserver-7bbb656c7d-5hl5h\" (UID: \"95e7f40a-6f2f-4689-829f-6d14e941ad9e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.928638 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/95e7f40a-6f2f-4689-829f-6d14e941ad9e-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-5hl5h\" (UID: \"95e7f40a-6f2f-4689-829f-6d14e941ad9e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.928755 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a4456e30-3eb3-4e1a-b22d-8888babf06a9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-np5qp\" (UID: \"a4456e30-3eb3-4e1a-b22d-8888babf06a9\") " pod="openshift-marketplace/marketplace-operator-79b997595-np5qp" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.928796 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-volume\" (UniqueName: \"kubernetes.io/configmap/264bb2e1-d946-4b30-9aa8-48cb9a9447e5-config-volume\") pod \"collect-profiles-29324670-2j7rr\" (UID: \"264bb2e1-d946-4b30-9aa8-48cb9a9447e5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324670-2j7rr" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.928835 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jm89f\" (UniqueName: \"kubernetes.io/projected/a2bfb93c-32c2-4dbe-b5a9-44f616a3e299-kube-api-access-jm89f\") pod \"machine-config-controller-84d6567774-kb4vv\" (UID: \"a2bfb93c-32c2-4dbe-b5a9-44f616a3e299\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kb4vv" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.928864 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a2bfb93c-32c2-4dbe-b5a9-44f616a3e299-proxy-tls\") pod \"machine-config-controller-84d6567774-kb4vv\" (UID: \"a2bfb93c-32c2-4dbe-b5a9-44f616a3e299\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kb4vv" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.928947 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a4456e30-3eb3-4e1a-b22d-8888babf06a9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-np5qp\" (UID: \"a4456e30-3eb3-4e1a-b22d-8888babf06a9\") " pod="openshift-marketplace/marketplace-operator-79b997595-np5qp" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.928995 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03938235-9990-4806-a4c7-21d1c97cada5-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-brt2c\" (UID: \"03938235-9990-4806-a4c7-21d1c97cada5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-brt2c" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.929019 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d6c10a45-5a0d-4f5b-931b-0eb351ddd8b2-metrics-tls\") pod \"ingress-operator-5b745b69d9-scmz7\" (UID: \"d6c10a45-5a0d-4f5b-931b-0eb351ddd8b2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-scmz7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.929044 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/aee31d1e-9049-4b14-9544-e26bc3ea2b38-registry-certificates\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.929068 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/aee31d1e-9049-4b14-9544-e26bc3ea2b38-bound-sa-token\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.929094 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/95e7f40a-6f2f-4689-829f-6d14e941ad9e-serving-cert\") pod \"apiserver-7bbb656c7d-5hl5h\" (UID: \"95e7f40a-6f2f-4689-829f-6d14e941ad9e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.929115 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6a4c32f3-acb2-45e6-99d3-4ff78be9b3b0-apiservice-cert\") pod \"packageserver-d55dfcdfc-5mh7r\" (UID: \"6a4c32f3-acb2-45e6-99d3-4ff78be9b3b0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5mh7r" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.929137 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76n9t\" (UniqueName: \"kubernetes.io/projected/eca40a31-8b9d-4825-82e2-253991771725-kube-api-access-76n9t\") pod \"service-ca-9c57cc56f-rmv8r\" (UID: \"eca40a31-8b9d-4825-82e2-253991771725\") " pod="openshift-service-ca/service-ca-9c57cc56f-rmv8r" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.929203 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c459aabe-55b9-415c-8782-8a112e9ea466-audit-dir\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.929226 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.929252 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/ae242b31-ad12-4328-8818-313458ed46aa-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-f8jvt\" (UID: \"ae242b31-ad12-4328-8818-313458ed46aa\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-f8jvt" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.929279 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/aee31d1e-9049-4b14-9544-e26bc3ea2b38-registry-tls\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.929299 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a2bfb93c-32c2-4dbe-b5a9-44f616a3e299-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-kb4vv\" (UID: \"a2bfb93c-32c2-4dbe-b5a9-44f616a3e299\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kb4vv" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.929322 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6a4c32f3-acb2-45e6-99d3-4ff78be9b3b0-webhook-cert\") pod 
\"packageserver-d55dfcdfc-5mh7r\" (UID: \"6a4c32f3-acb2-45e6-99d3-4ff78be9b3b0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5mh7r" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.929346 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2z8pw\" (UniqueName: \"kubernetes.io/projected/c459aabe-55b9-415c-8782-8a112e9ea466-kube-api-access-2z8pw\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.929398 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03938235-9990-4806-a4c7-21d1c97cada5-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-brt2c\" (UID: \"03938235-9990-4806-a4c7-21d1c97cada5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-brt2c" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.929598 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9c011ebc-362b-4893-9d7e-6a11a1d3e902-service-ca-bundle\") pod \"router-default-5444994796-tfknb\" (UID: \"9c011ebc-362b-4893-9d7e-6a11a1d3e902\") " pod="openshift-ingress/router-default-5444994796-tfknb" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.929628 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/6a4c32f3-acb2-45e6-99d3-4ff78be9b3b0-tmpfs\") pod \"packageserver-d55dfcdfc-5mh7r\" (UID: \"6a4c32f3-acb2-45e6-99d3-4ff78be9b3b0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5mh7r" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.929674 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/ad88b591-8c16-4151-92e0-5e2fd6d352f2-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-mcjm4\" (UID: \"ad88b591-8c16-4151-92e0-5e2fd6d352f2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mcjm4" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.929714 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/aee31d1e-9049-4b14-9544-e26bc3ea2b38-installation-pull-secrets\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.929740 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8fd1d26-6eac-46bf-bcba-06593430d823-config\") pod \"service-ca-operator-777779d784-mwh8q\" (UID: \"f8fd1d26-6eac-46bf-bcba-06593430d823\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mwh8q" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.929761 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d6c10a45-5a0d-4f5b-931b-0eb351ddd8b2-bound-sa-token\") pod \"ingress-operator-5b745b69d9-scmz7\" (UID: \"d6c10a45-5a0d-4f5b-931b-0eb351ddd8b2\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-scmz7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.929786 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/331490c6-4b8d-44cd-9081-5d9a7db1942a-mountpoint-dir\") pod \"csi-hostpathplugin-k9nrk\" (UID: \"331490c6-4b8d-44cd-9081-5d9a7db1942a\") " pod="hostpath-provisioner/csi-hostpathplugin-k9nrk" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.929812 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.929835 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9f3aca40-1b42-4f22-ba89-a9b51d30aa75-metrics-tls\") pod \"dns-default-8j6kq\" (UID: \"9f3aca40-1b42-4f22-ba89-a9b51d30aa75\") " pod="openshift-dns/dns-default-8j6kq" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.930116 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c459aabe-55b9-415c-8782-8a112e9ea466-audit-policies\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.930466 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/95e7f40a-6f2f-4689-829f-6d14e941ad9e-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-5hl5h\" (UID: \"95e7f40a-6f2f-4689-829f-6d14e941ad9e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:46 crc kubenswrapper[4899]: E1003 08:42:46.930810 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:47.430793286 +0000 UTC m=+141.538278329 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.931955 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/9c011ebc-362b-4893-9d7e-6a11a1d3e902-stats-auth\") pod \"router-default-5444994796-tfknb\" (UID: \"9c011ebc-362b-4893-9d7e-6a11a1d3e902\") " pod="openshift-ingress/router-default-5444994796-tfknb" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.932463 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/badd5177-888b-409f-abe1-ffb6902a436b-profile-collector-cert\") pod \"catalog-operator-68c6474976-wmlxz\" (UID: \"badd5177-888b-409f-abe1-ffb6902a436b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wmlxz" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.932808 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.932873 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/95e7f40a-6f2f-4689-829f-6d14e941ad9e-audit-policies\") pod \"apiserver-7bbb656c7d-5hl5h\" (UID: \"95e7f40a-6f2f-4689-829f-6d14e941ad9e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.933588 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8fd1d26-6eac-46bf-bcba-06593430d823-config\") pod \"service-ca-operator-777779d784-mwh8q\" (UID: \"f8fd1d26-6eac-46bf-bcba-06593430d823\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mwh8q" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.933849 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.934070 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4cc4b2c6-3ef1-41a5-8f47-16f20c6b2349-config\") pod \"kube-controller-manager-operator-78b949d7b-t9nkq\" (UID: \"4cc4b2c6-3ef1-41a5-8f47-16f20c6b2349\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t9nkq" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.935321 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9c011ebc-362b-4893-9d7e-6a11a1d3e902-metrics-certs\") pod \"router-default-5444994796-tfknb\" (UID: \"9c011ebc-362b-4893-9d7e-6a11a1d3e902\") " pod="openshift-ingress/router-default-5444994796-tfknb" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.935376 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.940102 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/9d271c75-e1f6-4cf1-8be9-2ef6e27066d6-etcd-client\") pod \"etcd-operator-b45778765-pmzmr\" (UID: \"9d271c75-e1f6-4cf1-8be9-2ef6e27066d6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pmzmr" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.940524 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.943948 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4cc4b2c6-3ef1-41a5-8f47-16f20c6b2349-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-t9nkq\" (UID: \"4cc4b2c6-3ef1-41a5-8f47-16f20c6b2349\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t9nkq" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.944405 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/95e7f40a-6f2f-4689-829f-6d14e941ad9e-etcd-client\") pod \"apiserver-7bbb656c7d-5hl5h\" (UID: \"95e7f40a-6f2f-4689-829f-6d14e941ad9e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.944502 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d2af99a7-4436-4e42-8418-03c1d8c503ad-srv-cert\") pod \"olm-operator-6b444d44fb-4cz5w\" (UID: \"d2af99a7-4436-4e42-8418-03c1d8c503ad\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4cz5w" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.944689 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f8fd1d26-6eac-46bf-bcba-06593430d823-serving-cert\") pod \"service-ca-operator-777779d784-mwh8q\" (UID: \"f8fd1d26-6eac-46bf-bcba-06593430d823\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mwh8q" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.945772 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a2bfb93c-32c2-4dbe-b5a9-44f616a3e299-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-kb4vv\" (UID: \"a2bfb93c-32c2-4dbe-b5a9-44f616a3e299\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kb4vv" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.945819 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/aee31d1e-9049-4b14-9544-e26bc3ea2b38-registry-certificates\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.946208 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d6c10a45-5a0d-4f5b-931b-0eb351ddd8b2-trusted-ca\") pod \"ingress-operator-5b745b69d9-scmz7\" (UID: \"d6c10a45-5a0d-4f5b-931b-0eb351ddd8b2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-scmz7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.946558 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/6a4c32f3-acb2-45e6-99d3-4ff78be9b3b0-tmpfs\") pod \"packageserver-d55dfcdfc-5mh7r\" (UID: \"6a4c32f3-acb2-45e6-99d3-4ff78be9b3b0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5mh7r" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.946846 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/524a0dd7-fc7e-41a5-9304-432c7b6e0624-available-featuregates\") pod \"openshift-config-operator-7777fb866f-kgddg\" (UID: \"524a0dd7-fc7e-41a5-9304-432c7b6e0624\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-kgddg" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.947104 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c459aabe-55b9-415c-8782-8a112e9ea466-audit-dir\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.947365 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a2bfb93c-32c2-4dbe-b5a9-44f616a3e299-proxy-tls\") pod \"machine-config-controller-84d6567774-kb4vv\" (UID: \"a2bfb93c-32c2-4dbe-b5a9-44f616a3e299\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kb4vv" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.948126 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.948305 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.948460 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/ae242b31-ad12-4328-8818-313458ed46aa-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-f8jvt\" (UID: \"ae242b31-ad12-4328-8818-313458ed46aa\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-f8jvt" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.948691 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.948733 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/9c011ebc-362b-4893-9d7e-6a11a1d3e902-default-certificate\") pod \"router-default-5444994796-tfknb\" (UID: \"9c011ebc-362b-4893-9d7e-6a11a1d3e902\") " pod="openshift-ingress/router-default-5444994796-tfknb" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.948827 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/badd5177-888b-409f-abe1-ffb6902a436b-srv-cert\") pod \"catalog-operator-68c6474976-wmlxz\" (UID: \"badd5177-888b-409f-abe1-ffb6902a436b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wmlxz" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.948885 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6a4c32f3-acb2-45e6-99d3-4ff78be9b3b0-webhook-cert\") pod \"packageserver-d55dfcdfc-5mh7r\" (UID: \"6a4c32f3-acb2-45e6-99d3-4ff78be9b3b0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5mh7r" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.949433 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9c011ebc-362b-4893-9d7e-6a11a1d3e902-service-ca-bundle\") pod \"router-default-5444994796-tfknb\" (UID: \"9c011ebc-362b-4893-9d7e-6a11a1d3e902\") " pod="openshift-ingress/router-default-5444994796-tfknb" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.949665 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/aee31d1e-9049-4b14-9544-e26bc3ea2b38-registry-tls\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.950055 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.951424 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/95e7f40a-6f2f-4689-829f-6d14e941ad9e-encryption-config\") pod \"apiserver-7bbb656c7d-5hl5h\" (UID: 
\"95e7f40a-6f2f-4689-829f-6d14e941ad9e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.952283 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6a4c32f3-acb2-45e6-99d3-4ff78be9b3b0-apiservice-cert\") pod \"packageserver-d55dfcdfc-5mh7r\" (UID: \"6a4c32f3-acb2-45e6-99d3-4ff78be9b3b0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5mh7r" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.952787 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03938235-9990-4806-a4c7-21d1c97cada5-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-brt2c\" (UID: \"03938235-9990-4806-a4c7-21d1c97cada5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-brt2c" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.953287 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d271c75-e1f6-4cf1-8be9-2ef6e27066d6-serving-cert\") pod \"etcd-operator-b45778765-pmzmr\" (UID: \"9d271c75-e1f6-4cf1-8be9-2ef6e27066d6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pmzmr" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.953620 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d2af99a7-4436-4e42-8418-03c1d8c503ad-profile-collector-cert\") pod \"olm-operator-6b444d44fb-4cz5w\" (UID: \"d2af99a7-4436-4e42-8418-03c1d8c503ad\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4cz5w" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.953698 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.954039 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d6c10a45-5a0d-4f5b-931b-0eb351ddd8b2-metrics-tls\") pod \"ingress-operator-5b745b69d9-scmz7\" (UID: \"d6c10a45-5a0d-4f5b-931b-0eb351ddd8b2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-scmz7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.954771 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/aee31d1e-9049-4b14-9544-e26bc3ea2b38-installation-pull-secrets\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.956723 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/524a0dd7-fc7e-41a5-9304-432c7b6e0624-serving-cert\") pod \"openshift-config-operator-7777fb866f-kgddg\" (UID: \"524a0dd7-fc7e-41a5-9304-432c7b6e0624\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-kgddg" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.960391 4899 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/95e7f40a-6f2f-4689-829f-6d14e941ad9e-serving-cert\") pod \"apiserver-7bbb656c7d-5hl5h\" (UID: \"95e7f40a-6f2f-4689-829f-6d14e941ad9e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.968097 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7tm2\" (UniqueName: \"kubernetes.io/projected/d183d7e2-7828-46a1-b80b-a1032e417265-kube-api-access-x7tm2\") pod \"migrator-59844c95c7-nrt8l\" (UID: \"d183d7e2-7828-46a1-b80b-a1032e417265\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-nrt8l" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.987873 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hsdpj\" (UniqueName: \"kubernetes.io/projected/6a4c32f3-acb2-45e6-99d3-4ff78be9b3b0-kube-api-access-hsdpj\") pod \"packageserver-d55dfcdfc-5mh7r\" (UID: \"6a4c32f3-acb2-45e6-99d3-4ff78be9b3b0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5mh7r" Oct 03 08:42:46 crc kubenswrapper[4899]: I1003 08:42:46.995715 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/03938235-9990-4806-a4c7-21d1c97cada5-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-brt2c\" (UID: \"03938235-9990-4806-a4c7-21d1c97cada5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-brt2c" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.013309 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-llzvf"] Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.018690 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-226rl\" (UniqueName: \"kubernetes.io/projected/d6c10a45-5a0d-4f5b-931b-0eb351ddd8b2-kube-api-access-226rl\") pod \"ingress-operator-5b745b69d9-scmz7\" (UID: \"d6c10a45-5a0d-4f5b-931b-0eb351ddd8b2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-scmz7" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.030877 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031072 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/ad88b591-8c16-4151-92e0-5e2fd6d352f2-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-mcjm4\" (UID: \"ad88b591-8c16-4151-92e0-5e2fd6d352f2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mcjm4" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031113 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/331490c6-4b8d-44cd-9081-5d9a7db1942a-mountpoint-dir\") pod \"csi-hostpathplugin-k9nrk\" (UID: \"331490c6-4b8d-44cd-9081-5d9a7db1942a\") " pod="hostpath-provisioner/csi-hostpathplugin-k9nrk" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031130 4899 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9f3aca40-1b42-4f22-ba89-a9b51d30aa75-metrics-tls\") pod \"dns-default-8j6kq\" (UID: \"9f3aca40-1b42-4f22-ba89-a9b51d30aa75\") " pod="openshift-dns/dns-default-8j6kq" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031182 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7a94e3ca-3b81-4dbf-a1db-73e6b752427c-proxy-tls\") pod \"machine-config-operator-74547568cd-k95tt\" (UID: \"7a94e3ca-3b81-4dbf-a1db-73e6b752427c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k95tt" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031202 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5cs8\" (UniqueName: \"kubernetes.io/projected/264bb2e1-d946-4b30-9aa8-48cb9a9447e5-kube-api-access-d5cs8\") pod \"collect-profiles-29324670-2j7rr\" (UID: \"264bb2e1-d946-4b30-9aa8-48cb9a9447e5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324670-2j7rr" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031227 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/331490c6-4b8d-44cd-9081-5d9a7db1942a-socket-dir\") pod \"csi-hostpathplugin-k9nrk\" (UID: \"331490c6-4b8d-44cd-9081-5d9a7db1942a\") " pod="hostpath-provisioner/csi-hostpathplugin-k9nrk" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031251 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/06b939f2-8591-4799-b59e-34cd8677aca6-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-w4cgg\" (UID: \"06b939f2-8591-4799-b59e-34cd8677aca6\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-w4cgg" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031299 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vb65\" (UniqueName: \"kubernetes.io/projected/ad88b591-8c16-4151-92e0-5e2fd6d352f2-kube-api-access-6vb65\") pod \"package-server-manager-789f6589d5-mcjm4\" (UID: \"ad88b591-8c16-4151-92e0-5e2fd6d352f2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mcjm4" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031349 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/79762752-6337-410e-8bc7-c0a47aa5d773-cert\") pod \"ingress-canary-f45zh\" (UID: \"79762752-6337-410e-8bc7-c0a47aa5d773\") " pod="openshift-ingress-canary/ingress-canary-f45zh" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031368 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/1f907faf-6eea-49c9-b601-b108e328d9d6-certs\") pod \"machine-config-server-csm2k\" (UID: \"1f907faf-6eea-49c9-b601-b108e328d9d6\") " pod="openshift-machine-config-operator/machine-config-server-csm2k" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031389 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/eca40a31-8b9d-4825-82e2-253991771725-signing-cabundle\") pod \"service-ca-9c57cc56f-rmv8r\" (UID: \"eca40a31-8b9d-4825-82e2-253991771725\") " pod="openshift-service-ca/service-ca-9c57cc56f-rmv8r" Oct 03 
08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031481 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsxj6\" (UniqueName: \"kubernetes.io/projected/a4456e30-3eb3-4e1a-b22d-8888babf06a9-kube-api-access-lsxj6\") pod \"marketplace-operator-79b997595-np5qp\" (UID: \"a4456e30-3eb3-4e1a-b22d-8888babf06a9\") " pod="openshift-marketplace/marketplace-operator-79b997595-np5qp" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031515 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/eca40a31-8b9d-4825-82e2-253991771725-signing-key\") pod \"service-ca-9c57cc56f-rmv8r\" (UID: \"eca40a31-8b9d-4825-82e2-253991771725\") " pod="openshift-service-ca/service-ca-9c57cc56f-rmv8r" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031551 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/1f907faf-6eea-49c9-b601-b108e328d9d6-node-bootstrap-token\") pod \"machine-config-server-csm2k\" (UID: \"1f907faf-6eea-49c9-b601-b108e328d9d6\") " pod="openshift-machine-config-operator/machine-config-server-csm2k" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031569 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtn5v\" (UniqueName: \"kubernetes.io/projected/06b939f2-8591-4799-b59e-34cd8677aca6-kube-api-access-qtn5v\") pod \"multus-admission-controller-857f4d67dd-w4cgg\" (UID: \"06b939f2-8591-4799-b59e-34cd8677aca6\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-w4cgg" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031587 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5d7wt\" (UniqueName: \"kubernetes.io/projected/9f3aca40-1b42-4f22-ba89-a9b51d30aa75-kube-api-access-5d7wt\") pod \"dns-default-8j6kq\" (UID: \"9f3aca40-1b42-4f22-ba89-a9b51d30aa75\") " pod="openshift-dns/dns-default-8j6kq" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031607 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmr59\" (UniqueName: \"kubernetes.io/projected/7a94e3ca-3b81-4dbf-a1db-73e6b752427c-kube-api-access-nmr59\") pod \"machine-config-operator-74547568cd-k95tt\" (UID: \"7a94e3ca-3b81-4dbf-a1db-73e6b752427c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k95tt" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031623 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/331490c6-4b8d-44cd-9081-5d9a7db1942a-plugins-dir\") pod \"csi-hostpathplugin-k9nrk\" (UID: \"331490c6-4b8d-44cd-9081-5d9a7db1942a\") " pod="hostpath-provisioner/csi-hostpathplugin-k9nrk" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031643 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zsrwv\" (UniqueName: \"kubernetes.io/projected/79762752-6337-410e-8bc7-c0a47aa5d773-kube-api-access-zsrwv\") pod \"ingress-canary-f45zh\" (UID: \"79762752-6337-410e-8bc7-c0a47aa5d773\") " pod="openshift-ingress-canary/ingress-canary-f45zh" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031661 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/7a94e3ca-3b81-4dbf-a1db-73e6b752427c-images\") pod 
\"machine-config-operator-74547568cd-k95tt\" (UID: \"7a94e3ca-3b81-4dbf-a1db-73e6b752427c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k95tt" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031686 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9r79f\" (UniqueName: \"kubernetes.io/projected/1f907faf-6eea-49c9-b601-b108e328d9d6-kube-api-access-9r79f\") pod \"machine-config-server-csm2k\" (UID: \"1f907faf-6eea-49c9-b601-b108e328d9d6\") " pod="openshift-machine-config-operator/machine-config-server-csm2k" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031737 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9f3aca40-1b42-4f22-ba89-a9b51d30aa75-config-volume\") pod \"dns-default-8j6kq\" (UID: \"9f3aca40-1b42-4f22-ba89-a9b51d30aa75\") " pod="openshift-dns/dns-default-8j6kq" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031755 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/264bb2e1-d946-4b30-9aa8-48cb9a9447e5-secret-volume\") pod \"collect-profiles-29324670-2j7rr\" (UID: \"264bb2e1-d946-4b30-9aa8-48cb9a9447e5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324670-2j7rr" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031776 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wlrw\" (UniqueName: \"kubernetes.io/projected/331490c6-4b8d-44cd-9081-5d9a7db1942a-kube-api-access-8wlrw\") pod \"csi-hostpathplugin-k9nrk\" (UID: \"331490c6-4b8d-44cd-9081-5d9a7db1942a\") " pod="hostpath-provisioner/csi-hostpathplugin-k9nrk" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031800 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/331490c6-4b8d-44cd-9081-5d9a7db1942a-csi-data-dir\") pod \"csi-hostpathplugin-k9nrk\" (UID: \"331490c6-4b8d-44cd-9081-5d9a7db1942a\") " pod="hostpath-provisioner/csi-hostpathplugin-k9nrk" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031816 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7a94e3ca-3b81-4dbf-a1db-73e6b752427c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-k95tt\" (UID: \"7a94e3ca-3b81-4dbf-a1db-73e6b752427c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k95tt" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031843 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/331490c6-4b8d-44cd-9081-5d9a7db1942a-registration-dir\") pod \"csi-hostpathplugin-k9nrk\" (UID: \"331490c6-4b8d-44cd-9081-5d9a7db1942a\") " pod="hostpath-provisioner/csi-hostpathplugin-k9nrk" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031872 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a4456e30-3eb3-4e1a-b22d-8888babf06a9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-np5qp\" (UID: \"a4456e30-3eb3-4e1a-b22d-8888babf06a9\") " pod="openshift-marketplace/marketplace-operator-79b997595-np5qp" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031906 4899 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/264bb2e1-d946-4b30-9aa8-48cb9a9447e5-config-volume\") pod \"collect-profiles-29324670-2j7rr\" (UID: \"264bb2e1-d946-4b30-9aa8-48cb9a9447e5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324670-2j7rr" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031926 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a4456e30-3eb3-4e1a-b22d-8888babf06a9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-np5qp\" (UID: \"a4456e30-3eb3-4e1a-b22d-8888babf06a9\") " pod="openshift-marketplace/marketplace-operator-79b997595-np5qp" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.031958 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76n9t\" (UniqueName: \"kubernetes.io/projected/eca40a31-8b9d-4825-82e2-253991771725-kube-api-access-76n9t\") pod \"service-ca-9c57cc56f-rmv8r\" (UID: \"eca40a31-8b9d-4825-82e2-253991771725\") " pod="openshift-service-ca/service-ca-9c57cc56f-rmv8r" Oct 03 08:42:47 crc kubenswrapper[4899]: E1003 08:42:47.032343 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:47.532314671 +0000 UTC m=+141.639799714 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.032672 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/331490c6-4b8d-44cd-9081-5d9a7db1942a-plugins-dir\") pod \"csi-hostpathplugin-k9nrk\" (UID: \"331490c6-4b8d-44cd-9081-5d9a7db1942a\") " pod="hostpath-provisioner/csi-hostpathplugin-k9nrk" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.032762 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/331490c6-4b8d-44cd-9081-5d9a7db1942a-csi-data-dir\") pod \"csi-hostpathplugin-k9nrk\" (UID: \"331490c6-4b8d-44cd-9081-5d9a7db1942a\") " pod="hostpath-provisioner/csi-hostpathplugin-k9nrk" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.033057 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9f3aca40-1b42-4f22-ba89-a9b51d30aa75-config-volume\") pod \"dns-default-8j6kq\" (UID: \"9f3aca40-1b42-4f22-ba89-a9b51d30aa75\") " pod="openshift-dns/dns-default-8j6kq" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.033321 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7a94e3ca-3b81-4dbf-a1db-73e6b752427c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-k95tt\" (UID: \"7a94e3ca-3b81-4dbf-a1db-73e6b752427c\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k95tt" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.033368 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/7a94e3ca-3b81-4dbf-a1db-73e6b752427c-images\") pod \"machine-config-operator-74547568cd-k95tt\" (UID: \"7a94e3ca-3b81-4dbf-a1db-73e6b752427c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k95tt" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.033409 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/331490c6-4b8d-44cd-9081-5d9a7db1942a-registration-dir\") pod \"csi-hostpathplugin-k9nrk\" (UID: \"331490c6-4b8d-44cd-9081-5d9a7db1942a\") " pod="hostpath-provisioner/csi-hostpathplugin-k9nrk" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.033757 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/331490c6-4b8d-44cd-9081-5d9a7db1942a-mountpoint-dir\") pod \"csi-hostpathplugin-k9nrk\" (UID: \"331490c6-4b8d-44cd-9081-5d9a7db1942a\") " pod="hostpath-provisioner/csi-hostpathplugin-k9nrk" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.034776 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/264bb2e1-d946-4b30-9aa8-48cb9a9447e5-config-volume\") pod \"collect-profiles-29324670-2j7rr\" (UID: \"264bb2e1-d946-4b30-9aa8-48cb9a9447e5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324670-2j7rr" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.034835 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/331490c6-4b8d-44cd-9081-5d9a7db1942a-socket-dir\") pod \"csi-hostpathplugin-k9nrk\" (UID: \"331490c6-4b8d-44cd-9081-5d9a7db1942a\") " pod="hostpath-provisioner/csi-hostpathplugin-k9nrk" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.035560 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a4456e30-3eb3-4e1a-b22d-8888babf06a9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-np5qp\" (UID: \"a4456e30-3eb3-4e1a-b22d-8888babf06a9\") " pod="openshift-marketplace/marketplace-operator-79b997595-np5qp" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.036024 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/eca40a31-8b9d-4825-82e2-253991771725-signing-cabundle\") pod \"service-ca-9c57cc56f-rmv8r\" (UID: \"eca40a31-8b9d-4825-82e2-253991771725\") " pod="openshift-service-ca/service-ca-9c57cc56f-rmv8r" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.039253 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/79762752-6337-410e-8bc7-c0a47aa5d773-cert\") pod \"ingress-canary-f45zh\" (UID: \"79762752-6337-410e-8bc7-c0a47aa5d773\") " pod="openshift-ingress-canary/ingress-canary-f45zh" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.039771 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7a94e3ca-3b81-4dbf-a1db-73e6b752427c-proxy-tls\") pod \"machine-config-operator-74547568cd-k95tt\" (UID: 
\"7a94e3ca-3b81-4dbf-a1db-73e6b752427c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k95tt" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.039919 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/264bb2e1-d946-4b30-9aa8-48cb9a9447e5-secret-volume\") pod \"collect-profiles-29324670-2j7rr\" (UID: \"264bb2e1-d946-4b30-9aa8-48cb9a9447e5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324670-2j7rr" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.040016 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/06b939f2-8591-4799-b59e-34cd8677aca6-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-w4cgg\" (UID: \"06b939f2-8591-4799-b59e-34cd8677aca6\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-w4cgg" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.040465 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9f3aca40-1b42-4f22-ba89-a9b51d30aa75-metrics-tls\") pod \"dns-default-8j6kq\" (UID: \"9f3aca40-1b42-4f22-ba89-a9b51d30aa75\") " pod="openshift-dns/dns-default-8j6kq" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.041085 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a4456e30-3eb3-4e1a-b22d-8888babf06a9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-np5qp\" (UID: \"a4456e30-3eb3-4e1a-b22d-8888babf06a9\") " pod="openshift-marketplace/marketplace-operator-79b997595-np5qp" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.042879 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/eca40a31-8b9d-4825-82e2-253991771725-signing-key\") pod \"service-ca-9c57cc56f-rmv8r\" (UID: \"eca40a31-8b9d-4825-82e2-253991771725\") " pod="openshift-service-ca/service-ca-9c57cc56f-rmv8r" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.043001 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/ad88b591-8c16-4151-92e0-5e2fd6d352f2-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-mcjm4\" (UID: \"ad88b591-8c16-4151-92e0-5e2fd6d352f2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mcjm4" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.043006 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcntn\" (UniqueName: \"kubernetes.io/projected/badd5177-888b-409f-abe1-ffb6902a436b-kube-api-access-tcntn\") pod \"catalog-operator-68c6474976-wmlxz\" (UID: \"badd5177-888b-409f-abe1-ffb6902a436b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wmlxz" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.043540 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/1f907faf-6eea-49c9-b601-b108e328d9d6-certs\") pod \"machine-config-server-csm2k\" (UID: \"1f907faf-6eea-49c9-b601-b108e328d9d6\") " pod="openshift-machine-config-operator/machine-config-server-csm2k" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.045984 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/1f907faf-6eea-49c9-b601-b108e328d9d6-node-bootstrap-token\") pod \"machine-config-server-csm2k\" (UID: \"1f907faf-6eea-49c9-b601-b108e328d9d6\") " pod="openshift-machine-config-operator/machine-config-server-csm2k" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.054240 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h64pb\" (UniqueName: \"kubernetes.io/projected/aee31d1e-9049-4b14-9544-e26bc3ea2b38-kube-api-access-h64pb\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.075974 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zqzww"] Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.078133 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pj7td\" (UniqueName: \"kubernetes.io/projected/9c011ebc-362b-4893-9d7e-6a11a1d3e902-kube-api-access-pj7td\") pod \"router-default-5444994796-tfknb\" (UID: \"9c011ebc-362b-4893-9d7e-6a11a1d3e902\") " pod="openshift-ingress/router-default-5444994796-tfknb" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.097565 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67mdt\" (UniqueName: \"kubernetes.io/projected/d2af99a7-4436-4e42-8418-03c1d8c503ad-kube-api-access-67mdt\") pod \"olm-operator-6b444d44fb-4cz5w\" (UID: \"d2af99a7-4436-4e42-8418-03c1d8c503ad\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4cz5w" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.115742 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkzvr\" (UniqueName: \"kubernetes.io/projected/524a0dd7-fc7e-41a5-9304-432c7b6e0624-kube-api-access-mkzvr\") pod \"openshift-config-operator-7777fb866f-kgddg\" (UID: \"524a0dd7-fc7e-41a5-9304-432c7b6e0624\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-kgddg" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.124937 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-kgddg" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.132818 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:47 crc kubenswrapper[4899]: E1003 08:42:47.133390 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:47.633373382 +0000 UTC m=+141.740858335 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.140562 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2z8pw\" (UniqueName: \"kubernetes.io/projected/c459aabe-55b9-415c-8782-8a112e9ea466-kube-api-access-2z8pw\") pod \"oauth-openshift-558db77b4-tkpkx\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.145843 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.147982 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-lw5xr" event={"ID":"a8b9468d-675b-42d9-b5e8-b45f5d35deef","Type":"ContainerStarted","Data":"417b6a78d1830d9afd79b45d574f56b9c26bdf8ae627290da325649e61890108"} Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.148048 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-lw5xr" event={"ID":"a8b9468d-675b-42d9-b5e8-b45f5d35deef","Type":"ContainerStarted","Data":"2c28e498fc31b05f538e773be39be52eef3ce066b4d51d0e48aa511c5f9f6576"} Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.149236 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-xg5bn" event={"ID":"1c74fca8-3a6a-4253-9284-dd6417e8420a","Type":"ContainerStarted","Data":"4819209c9a9dc7da8bc242797fb0d8598d7429ce9d7f435bf91db6e25c5c4ecc"} Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.149260 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-xg5bn" event={"ID":"1c74fca8-3a6a-4253-9284-dd6417e8420a","Type":"ContainerStarted","Data":"f410f204b7f764dc30d7e875d3e833b9e50b8bc82183a6a46d382a2778cd2320"} Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.150812 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b8pqc" event={"ID":"7fa6a508-d3a9-44de-a87a-4a5c474ad589","Type":"ContainerStarted","Data":"1658346e2f0a46f2ded64c345ad44e674746895e8d00d8e268178cf416692e23"} Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.150850 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b8pqc" event={"ID":"7fa6a508-d3a9-44de-a87a-4a5c474ad589","Type":"ContainerStarted","Data":"d4ef53971f72d12598a4bb92a61d9bf20cd2107043a7bcda0bb69feedb6a8a70"} Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.150869 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b8pqc" event={"ID":"7fa6a508-d3a9-44de-a87a-4a5c474ad589","Type":"ContainerStarted","Data":"a0af2bb61ee48798d01978be77645554e26c933ce70a871957ecb40b92ea9098"} Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.155268 4899 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z2lfm" event={"ID":"34cebeb0-e452-40a4-9c67-43353a244e15","Type":"ContainerStarted","Data":"3cbe155ddb6a11d855bb5ec140db8cf8253ed3f93c6ecc212fb2e5ac15e81c47"} Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.155315 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z2lfm" event={"ID":"34cebeb0-e452-40a4-9c67-43353a244e15","Type":"ContainerStarted","Data":"95efba82a7c1360d85810cf2ab41638f7edc9ec9149616b223e2a1919740a5d8"} Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.155335 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5j62"] Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.156675 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-k28dm" event={"ID":"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d","Type":"ContainerStarted","Data":"0c300360e76f1cadf78f91492ec9b0749864346e3775be9500dd0ab7eaf1a66b"} Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.156701 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-k28dm" event={"ID":"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d","Type":"ContainerStarted","Data":"d0034f42474a439149dd168a9351c037b7be101be0c76e77bc2fecb10fbadb99"} Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.157204 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jm89f\" (UniqueName: \"kubernetes.io/projected/a2bfb93c-32c2-4dbe-b5a9-44f616a3e299-kube-api-access-jm89f\") pod \"machine-config-controller-84d6567774-kb4vv\" (UID: \"a2bfb93c-32c2-4dbe-b5a9-44f616a3e299\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kb4vv" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.157640 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp" event={"ID":"a5a7bb2d-e3f4-4c2e-9d78-483724280890","Type":"ContainerStarted","Data":"64176c8de8a6ea0ad7fdcdd96914e7b82c8ff8315989d3078a7f611297ab9b47"} Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.157666 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp" event={"ID":"a5a7bb2d-e3f4-4c2e-9d78-483724280890","Type":"ContainerStarted","Data":"93ebd9ac231eed322b17ee13ea12f84d2c3f127706bfd32e8114c70344a9177e"} Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.158092 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.160146 4899 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-dtsxp container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.160183 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp" podUID="a5a7bb2d-e3f4-4c2e-9d78-483724280890" 
containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.162728 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-kzhtk" event={"ID":"e8f9f430-36b0-475f-830b-cf5e0c84ed59","Type":"ContainerStarted","Data":"3aecbba015a745051f50e031834831f8fdcb91440b0a0505820ac5b6f55313e7"} Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.162763 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-kzhtk" event={"ID":"e8f9f430-36b0-475f-830b-cf5e0c84ed59","Type":"ContainerStarted","Data":"46c9ad9d751132132a61742c42dba73de8baa306cb94785d2a103db4aa5f85b7"} Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.162973 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-kzhtk" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.163798 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zqzww" event={"ID":"1bf00f2e-253d-4ce1-afeb-e559137c9488","Type":"ContainerStarted","Data":"05c09e3f6aba82d9a0be9ecc5443670bb1fd9a8992e18d8ad9ad6e255958acc3"} Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.166213 4899 patch_prober.go:28] interesting pod/console-operator-58897d9998-kzhtk container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.18:8443/readyz\": dial tcp 10.217.0.18:8443: connect: connection refused" start-of-body= Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.166260 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-kzhtk" podUID="e8f9f430-36b0-475f-830b-cf5e0c84ed59" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.18:8443/readyz\": dial tcp 10.217.0.18:8443: connect: connection refused" Oct 03 08:42:47 crc kubenswrapper[4899]: W1003 08:42:47.166686 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1a9c7a81_f2a5_48a4_a202_008603e07184.slice/crio-90c3b7e09c200f5c31c110e76fc3616e5d6a3b05efc8b2eb41b4d23b6b58ff3c WatchSource:0}: Error finding container 90c3b7e09c200f5c31c110e76fc3616e5d6a3b05efc8b2eb41b4d23b6b58ff3c: Status 404 returned error can't find the container with id 90c3b7e09c200f5c31c110e76fc3616e5d6a3b05efc8b2eb41b4d23b6b58ff3c Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.166776 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-llzvf" event={"ID":"1e4eab70-7ebf-4f07-883b-8d08e0453ec1","Type":"ContainerStarted","Data":"08b70799c54cd9fd9fa053b3d05f7a40663cbddd30dad2de051ff2a96d673d71"} Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.170582 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-462ht" event={"ID":"dd179c6a-42d1-41bb-a584-dd843ec84bb4","Type":"ContainerStarted","Data":"b49875e51839a86d19122cde23b705e7e551ce0a119b53d40fe816d763d2844d"} Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.170651 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-462ht" 
event={"ID":"dd179c6a-42d1-41bb-a584-dd843ec84bb4","Type":"ContainerStarted","Data":"4befd0ccf7096ab260c4a9ae8c49beeffc99aad7fdc444e2179a811ac52e7949"} Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.170666 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-462ht" event={"ID":"dd179c6a-42d1-41bb-a584-dd843ec84bb4","Type":"ContainerStarted","Data":"928ba7a1b04ec7a0ab9d93249cb2d1926cb7cb39edf7e853b5cff8f31fe02a32"} Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.172660 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2ghv5" event={"ID":"4c6e656f-7683-4fe1-9a4f-00885aa67657","Type":"ContainerStarted","Data":"b89e9ff70530b032b9eb3cf408fef2822d4c743803c527d2c7f7c93d5e144542"} Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.172699 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2ghv5" event={"ID":"4c6e656f-7683-4fe1-9a4f-00885aa67657","Type":"ContainerStarted","Data":"61d4659dc1b247caed8808aeb27ec61d9f5b100cc5892af42e98ff00a8361dc5"} Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.173228 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-tfknb" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.173860 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5dcj6\" (UniqueName: \"kubernetes.io/projected/9d271c75-e1f6-4cf1-8be9-2ef6e27066d6-kube-api-access-5dcj6\") pod \"etcd-operator-b45778765-pmzmr\" (UID: \"9d271c75-e1f6-4cf1-8be9-2ef6e27066d6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pmzmr" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.175200 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55cqf" event={"ID":"ed905c2a-053e-4094-bb3b-91412f1ec0f8","Type":"ContainerStarted","Data":"a27f354228fbacfcd8eb9b8d7ac4bdf9dc906190b80f6720e535275e593721ef"} Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.175230 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55cqf" event={"ID":"ed905c2a-053e-4094-bb3b-91412f1ec0f8","Type":"ContainerStarted","Data":"9d07d9bb74ee957cfb282a729a35407669c7a278e7bec175fd7deb128426e1fc"} Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.177917 4899 generic.go:334] "Generic (PLEG): container finished" podID="d7f325c2-54b2-42b3-bcab-466aa71dc831" containerID="06c9279398022bba154761ca039f10d68404919217b56f8c698bccb7775fa62e" exitCode=0 Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.178039 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-v5crb" event={"ID":"d7f325c2-54b2-42b3-bcab-466aa71dc831","Type":"ContainerDied","Data":"06c9279398022bba154761ca039f10d68404919217b56f8c698bccb7775fa62e"} Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.178112 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-v5crb" event={"ID":"d7f325c2-54b2-42b3-bcab-466aa71dc831","Type":"ContainerStarted","Data":"cd6a0880cd3604cbf900a5560aa5ae458fa576a51f1d8ee2cc9bfe9afb410dea"} Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.179304 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kb4vv" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.180746 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" event={"ID":"19c551e7-757b-4136-a143-6b6aa8152c57","Type":"ContainerStarted","Data":"d07e2c20a6dba85d720c85c3fa6e2cdcfee55f19c58246fb1ba077882cc14bf4"} Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.181272 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" event={"ID":"19c551e7-757b-4136-a143-6b6aa8152c57","Type":"ContainerStarted","Data":"31c0d64d06e866e28496d6e834c3236334be4fceac51f7b18d2b8e36b34db0d4"} Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.181299 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.182763 4899 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-hbstw container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.182800 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" podUID="19c551e7-757b-4136-a143-6b6aa8152c57" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.185603 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-brt2c" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.194443 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d6c10a45-5a0d-4f5b-931b-0eb351ddd8b2-bound-sa-token\") pod \"ingress-operator-5b745b69d9-scmz7\" (UID: \"d6c10a45-5a0d-4f5b-931b-0eb351ddd8b2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-scmz7" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.201050 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4cz5w" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.210472 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5mh7r" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.215162 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/aee31d1e-9049-4b14-9544-e26bc3ea2b38-bound-sa-token\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:47 crc kubenswrapper[4899]: W1003 08:42:47.217545 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9c011ebc_362b_4893_9d7e_6a11a1d3e902.slice/crio-fc0b77bdab943f0cd0bc3d1a518adb3686da665797c0162551641349f3e3a5ed WatchSource:0}: Error finding container fc0b77bdab943f0cd0bc3d1a518adb3686da665797c0162551641349f3e3a5ed: Status 404 returned error can't find the container with id fc0b77bdab943f0cd0bc3d1a518adb3686da665797c0162551641349f3e3a5ed Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.234455 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.236336 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wmlxz" Oct 03 08:42:47 crc kubenswrapper[4899]: E1003 08:42:47.241346 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:47.741326537 +0000 UTC m=+141.848811490 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.250085 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-nrt8l" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.264816 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gbfdm\" (UniqueName: \"kubernetes.io/projected/ae242b31-ad12-4328-8818-313458ed46aa-kube-api-access-gbfdm\") pod \"control-plane-machine-set-operator-78cbb6b69f-f8jvt\" (UID: \"ae242b31-ad12-4328-8818-313458ed46aa\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-f8jvt" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.284049 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5h82\" (UniqueName: \"kubernetes.io/projected/f8fd1d26-6eac-46bf-bcba-06593430d823-kube-api-access-l5h82\") pod \"service-ca-operator-777779d784-mwh8q\" (UID: \"f8fd1d26-6eac-46bf-bcba-06593430d823\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mwh8q" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.301681 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4cc4b2c6-3ef1-41a5-8f47-16f20c6b2349-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-t9nkq\" (UID: \"4cc4b2c6-3ef1-41a5-8f47-16f20c6b2349\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t9nkq" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.318124 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-47htd\" (UniqueName: \"kubernetes.io/projected/2206e9a3-b591-4d0a-aa09-9dc5b2e54342-kube-api-access-47htd\") pod \"downloads-7954f5f757-dpwsf\" (UID: \"2206e9a3-b591-4d0a-aa09-9dc5b2e54342\") " pod="openshift-console/downloads-7954f5f757-dpwsf" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.340636 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h69x5\" (UniqueName: \"kubernetes.io/projected/95e7f40a-6f2f-4689-829f-6d14e941ad9e-kube-api-access-h69x5\") pod \"apiserver-7bbb656c7d-5hl5h\" (UID: \"95e7f40a-6f2f-4689-829f-6d14e941ad9e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.345353 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:47 crc kubenswrapper[4899]: E1003 08:42:47.345652 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:47.845640309 +0000 UTC m=+141.953125252 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.355998 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76n9t\" (UniqueName: \"kubernetes.io/projected/eca40a31-8b9d-4825-82e2-253991771725-kube-api-access-76n9t\") pod \"service-ca-9c57cc56f-rmv8r\" (UID: \"eca40a31-8b9d-4825-82e2-253991771725\") " pod="openshift-service-ca/service-ca-9c57cc56f-rmv8r" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.361084 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-tkpkx"] Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.377066 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5d7wt\" (UniqueName: \"kubernetes.io/projected/9f3aca40-1b42-4f22-ba89-a9b51d30aa75-kube-api-access-5d7wt\") pod \"dns-default-8j6kq\" (UID: \"9f3aca40-1b42-4f22-ba89-a9b51d30aa75\") " pod="openshift-dns/dns-default-8j6kq" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.410721 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmr59\" (UniqueName: \"kubernetes.io/projected/7a94e3ca-3b81-4dbf-a1db-73e6b752427c-kube-api-access-nmr59\") pod \"machine-config-operator-74547568cd-k95tt\" (UID: \"7a94e3ca-3b81-4dbf-a1db-73e6b752427c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k95tt" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.414313 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-kgddg"] Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.415949 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtn5v\" (UniqueName: \"kubernetes.io/projected/06b939f2-8591-4799-b59e-34cd8677aca6-kube-api-access-qtn5v\") pod \"multus-admission-controller-857f4d67dd-w4cgg\" (UID: \"06b939f2-8591-4799-b59e-34cd8677aca6\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-w4cgg" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.417560 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-scmz7" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.431700 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-pmzmr" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.435848 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9r79f\" (UniqueName: \"kubernetes.io/projected/1f907faf-6eea-49c9-b601-b108e328d9d6-kube-api-access-9r79f\") pod \"machine-config-server-csm2k\" (UID: \"1f907faf-6eea-49c9-b601-b108e328d9d6\") " pod="openshift-machine-config-operator/machine-config-server-csm2k" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.446213 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:47 crc kubenswrapper[4899]: E1003 08:42:47.447374 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:47.947351431 +0000 UTC m=+142.054836394 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.451856 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-dpwsf" Oct 03 08:42:47 crc kubenswrapper[4899]: W1003 08:42:47.458199 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod524a0dd7_fc7e_41a5_9304_432c7b6e0624.slice/crio-78b900e26f01bd8a8af8bee3405898bc316d41725f315c2b09a5a8d423134f56 WatchSource:0}: Error finding container 78b900e26f01bd8a8af8bee3405898bc316d41725f315c2b09a5a8d423134f56: Status 404 returned error can't find the container with id 78b900e26f01bd8a8af8bee3405898bc316d41725f315c2b09a5a8d423134f56 Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.480812 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wlrw\" (UniqueName: \"kubernetes.io/projected/331490c6-4b8d-44cd-9081-5d9a7db1942a-kube-api-access-8wlrw\") pod \"csi-hostpathplugin-k9nrk\" (UID: \"331490c6-4b8d-44cd-9081-5d9a7db1942a\") " pod="hostpath-provisioner/csi-hostpathplugin-k9nrk" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.493072 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.499329 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zsrwv\" (UniqueName: \"kubernetes.io/projected/79762752-6337-410e-8bc7-c0a47aa5d773-kube-api-access-zsrwv\") pod \"ingress-canary-f45zh\" (UID: \"79762752-6337-410e-8bc7-c0a47aa5d773\") " pod="openshift-ingress-canary/ingress-canary-f45zh" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.509846 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vb65\" (UniqueName: \"kubernetes.io/projected/ad88b591-8c16-4151-92e0-5e2fd6d352f2-kube-api-access-6vb65\") pod \"package-server-manager-789f6589d5-mcjm4\" (UID: \"ad88b591-8c16-4151-92e0-5e2fd6d352f2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mcjm4" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.516453 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-f8jvt" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.521564 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-kb4vv"] Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.522275 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5cs8\" (UniqueName: \"kubernetes.io/projected/264bb2e1-d946-4b30-9aa8-48cb9a9447e5-kube-api-access-d5cs8\") pod \"collect-profiles-29324670-2j7rr\" (UID: \"264bb2e1-d946-4b30-9aa8-48cb9a9447e5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324670-2j7rr" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.544456 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t9nkq" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.544969 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsxj6\" (UniqueName: \"kubernetes.io/projected/a4456e30-3eb3-4e1a-b22d-8888babf06a9-kube-api-access-lsxj6\") pod \"marketplace-operator-79b997595-np5qp\" (UID: \"a4456e30-3eb3-4e1a-b22d-8888babf06a9\") " pod="openshift-marketplace/marketplace-operator-79b997595-np5qp" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.550195 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:47 crc kubenswrapper[4899]: E1003 08:42:47.550585 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:48.050572678 +0000 UTC m=+142.158057631 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.558460 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mwh8q" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.564671 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-csm2k" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.572529 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-np5qp" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.581101 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k95tt" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.601181 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-rmv8r" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.602528 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-w4cgg" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.607703 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mcjm4" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.614779 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324670-2j7rr" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.623374 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-8j6kq" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.653073 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:47 crc kubenswrapper[4899]: E1003 08:42:47.653519 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:48.153499998 +0000 UTC m=+142.260984951 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.653776 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-k9nrk" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.664204 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-f45zh" Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.749408 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5mh7r"] Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.754164 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:47 crc kubenswrapper[4899]: E1003 08:42:47.754516 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:48.254499667 +0000 UTC m=+142.361984620 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.809524 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-brt2c"] Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.857473 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:47 crc kubenswrapper[4899]: E1003 08:42:47.857858 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:48.357830588 +0000 UTC m=+142.465315541 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:47 crc kubenswrapper[4899]: W1003 08:42:47.938863 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod03938235_9990_4806_a4c7_21d1c97cada5.slice/crio-0f259d92eeea7b8b9ae67e6fa9acd30d16ddf2baa0f8db755a43ee0321dfacac WatchSource:0}: Error finding container 0f259d92eeea7b8b9ae67e6fa9acd30d16ddf2baa0f8db755a43ee0321dfacac: Status 404 returned error can't find the container with id 0f259d92eeea7b8b9ae67e6fa9acd30d16ddf2baa0f8db755a43ee0321dfacac Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.943035 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-scmz7"] Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.964602 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:47 crc kubenswrapper[4899]: E1003 08:42:47.965234 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:48.465220936 +0000 UTC m=+142.572705889 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:47 crc kubenswrapper[4899]: I1003 08:42:47.999803 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4cz5w"] Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.051825 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-nrt8l"] Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.065776 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:48 crc kubenswrapper[4899]: E1003 08:42:48.066307 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-10-03 08:42:48.56606419 +0000 UTC m=+142.673549153 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.068129 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:48 crc kubenswrapper[4899]: E1003 08:42:48.068634 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:48.56860662 +0000 UTC m=+142.676091563 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.069470 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wmlxz"] Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.122988 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-dpwsf"] Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.172918 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:48 crc kubenswrapper[4899]: E1003 08:42:48.173365 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:48.673345005 +0000 UTC m=+142.780829948 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.196202 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4cz5w" event={"ID":"d2af99a7-4436-4e42-8418-03c1d8c503ad","Type":"ContainerStarted","Data":"5c39a29924fbafd2e7115c65040c8d4b7bde15c688ea3ccd06071c1dbdd898f2"} Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.199082 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-scmz7" event={"ID":"d6c10a45-5a0d-4f5b-931b-0eb351ddd8b2","Type":"ContainerStarted","Data":"2481851f71ac6fc180076b735e57abbcc68b469917f3a448709a31ff51cf3381"} Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.204703 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kb4vv" event={"ID":"a2bfb93c-32c2-4dbe-b5a9-44f616a3e299","Type":"ContainerStarted","Data":"3f9d5795901d55ef42374007fbf37f4156efb3b155beac526dcd88810d7dccb1"} Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.207750 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-brt2c" event={"ID":"03938235-9990-4806-a4c7-21d1c97cada5","Type":"ContainerStarted","Data":"0f259d92eeea7b8b9ae67e6fa9acd30d16ddf2baa0f8db755a43ee0321dfacac"} Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.210602 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55cqf" event={"ID":"ed905c2a-053e-4094-bb3b-91412f1ec0f8","Type":"ContainerStarted","Data":"5ff8a721ea2b43d839ddb1f0d75226dc7be35864c4f699484166dfd0c32673da"} Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.213473 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zqzww" event={"ID":"1bf00f2e-253d-4ce1-afeb-e559137c9488","Type":"ContainerStarted","Data":"1f938ac59437068f36010353238fd7e4f17b1ea70a47e1610bc1de743265424e"} Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.216257 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-kgddg" event={"ID":"524a0dd7-fc7e-41a5-9304-432c7b6e0624","Type":"ContainerStarted","Data":"78b900e26f01bd8a8af8bee3405898bc316d41725f315c2b09a5a8d423134f56"} Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.220758 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-nrt8l" event={"ID":"d183d7e2-7828-46a1-b80b-a1032e417265","Type":"ContainerStarted","Data":"62255e25acd9f67de60d72d54da85d6b282d9f3dc34df386082eb65ea2027222"} Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.223121 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" 
event={"ID":"c459aabe-55b9-415c-8782-8a112e9ea466","Type":"ContainerStarted","Data":"4e6f8a44b8364e344397f6d175bbd2accdfcda13496ae580a8fa62433e8cc05d"} Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.223288 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.223299 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" event={"ID":"c459aabe-55b9-415c-8782-8a112e9ea466","Type":"ContainerStarted","Data":"f50b6bf7840fa270fc68e98f93b640126527b36734ba5fc86f7c3f169efa29a0"} Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.228071 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-csm2k" event={"ID":"1f907faf-6eea-49c9-b601-b108e328d9d6","Type":"ContainerStarted","Data":"5e870748a979e02807fb6f38ce67c3b95dcb649961f8fee0672d909e90611136"} Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.234497 4899 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-tkpkx container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.35:6443/healthz\": dial tcp 10.217.0.35:6443: connect: connection refused" start-of-body= Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.234553 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" podUID="c459aabe-55b9-415c-8782-8a112e9ea466" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.35:6443/healthz\": dial tcp 10.217.0.35:6443: connect: connection refused" Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.239749 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-tfknb" event={"ID":"9c011ebc-362b-4893-9d7e-6a11a1d3e902","Type":"ContainerStarted","Data":"3766c6d4de958c749c77d15a8867acc059fa6fa32c80ceddbc74dc4cc0efa9c9"} Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.239936 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-tfknb" event={"ID":"9c011ebc-362b-4893-9d7e-6a11a1d3e902","Type":"ContainerStarted","Data":"fc0b77bdab943f0cd0bc3d1a518adb3686da665797c0162551641349f3e3a5ed"} Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.251680 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-v5crb" event={"ID":"d7f325c2-54b2-42b3-bcab-466aa71dc831","Type":"ContainerStarted","Data":"6021ab7990d3bdb80f29e2140ca6da5b33305281c66e35d8f449b4de1eb6ce51"} Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.258455 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-llzvf" event={"ID":"1e4eab70-7ebf-4f07-883b-8d08e0453ec1","Type":"ContainerStarted","Data":"9890eb7119746aded8067b93671fb8acd53fc0a0925f3f0b305ec1d67ea0574f"} Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.260301 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5j62" event={"ID":"1a9c7a81-f2a5-48a4-a202-008603e07184","Type":"ContainerStarted","Data":"3f1ab4589980327c68bc3d8b80cb54dc1215b1f43ff2baee5a0590ce79f95a77"} Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.260331 4899 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5j62" event={"ID":"1a9c7a81-f2a5-48a4-a202-008603e07184","Type":"ContainerStarted","Data":"90c3b7e09c200f5c31c110e76fc3616e5d6a3b05efc8b2eb41b4d23b6b58ff3c"} Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.276373 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5mh7r" event={"ID":"6a4c32f3-acb2-45e6-99d3-4ff78be9b3b0","Type":"ContainerStarted","Data":"3eebfbc46528de312fc3b23a2c65ae6b2d78da6f2b184011cfc6f4a12d4961f0"} Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.277092 4899 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-hbstw container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.277140 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" podUID="19c551e7-757b-4136-a143-6b6aa8152c57" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.277840 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:48 crc kubenswrapper[4899]: E1003 08:42:48.279504 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:48.779492884 +0000 UTC m=+142.886977837 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.290172 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp" Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.378577 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:48 crc kubenswrapper[4899]: E1003 08:42:48.379165 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:48.879143101 +0000 UTC m=+142.986628064 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.379203 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:48 crc kubenswrapper[4899]: E1003 08:42:48.381005 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:48.880989129 +0000 UTC m=+142.988474082 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.481717 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:48 crc kubenswrapper[4899]: E1003 08:42:48.482091 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:48.9820736 +0000 UTC m=+143.089558553 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.585795 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:48 crc kubenswrapper[4899]: E1003 08:42:48.586724 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:49.086708892 +0000 UTC m=+143.194193845 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.650479 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-kzhtk" Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.690988 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:48 crc kubenswrapper[4899]: E1003 08:42:48.691337 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:49.191322294 +0000 UTC m=+143.298807247 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.745280 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-lw5xr" podStartSLOduration=122.745263445 podStartE2EDuration="2m2.745263445s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:48.743211191 +0000 UTC m=+142.850696134" watchObservedRunningTime="2025-10-03 08:42:48.745263445 +0000 UTC m=+142.852748398" Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.794032 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:48 crc kubenswrapper[4899]: E1003 08:42:48.795520 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:49.295505101 +0000 UTC m=+143.402990054 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.795536 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp" podStartSLOduration=122.795514852 podStartE2EDuration="2m2.795514852s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:48.794720727 +0000 UTC m=+142.902205680" watchObservedRunningTime="2025-10-03 08:42:48.795514852 +0000 UTC m=+142.902999795" Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.837537 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-55cqf" podStartSLOduration=122.837522063 podStartE2EDuration="2m2.837522063s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:48.836215402 +0000 UTC m=+142.943700355" watchObservedRunningTime="2025-10-03 08:42:48.837522063 +0000 UTC m=+142.945007016" Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.872862 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-tfknb" podStartSLOduration=122.872847068 podStartE2EDuration="2m2.872847068s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:48.872774466 +0000 UTC m=+142.980259419" watchObservedRunningTime="2025-10-03 08:42:48.872847068 +0000 UTC m=+142.980332021" Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.896093 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:48 crc kubenswrapper[4899]: E1003 08:42:48.896639 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:49.396622744 +0000 UTC m=+143.504107697 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.951074 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-k95tt"] Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.998140 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:48 crc kubenswrapper[4899]: E1003 08:42:48.998763 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:49.498749829 +0000 UTC m=+143.606234782 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:48 crc kubenswrapper[4899]: I1003 08:42:48.998801 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-f8jvt"] Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.019235 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" podStartSLOduration=123.019219893 podStartE2EDuration="2m3.019219893s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:48.995758906 +0000 UTC m=+143.103243859" watchObservedRunningTime="2025-10-03 08:42:49.019219893 +0000 UTC m=+143.126704846" Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.019335 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h"] Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.033529 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-462ht" podStartSLOduration=123.033513116 podStartE2EDuration="2m3.033513116s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:49.029818952 +0000 UTC m=+143.137303895" watchObservedRunningTime="2025-10-03 08:42:49.033513116 +0000 UTC m=+143.140998069" Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.079558 4899 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-kzhtk" podStartSLOduration=123.079540682 podStartE2EDuration="2m3.079540682s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:49.076869499 +0000 UTC m=+143.184354462" watchObservedRunningTime="2025-10-03 08:42:49.079540682 +0000 UTC m=+143.187025635" Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.112430 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:49 crc kubenswrapper[4899]: E1003 08:42:49.112738 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:49.61272137 +0000 UTC m=+143.720206323 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.122972 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t9nkq"] Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.136000 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zqzww" podStartSLOduration=123.135980891 podStartE2EDuration="2m3.135980891s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:49.11757811 +0000 UTC m=+143.225063053" watchObservedRunningTime="2025-10-03 08:42:49.135980891 +0000 UTC m=+143.243465844" Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.178311 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-k9nrk"] Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.182312 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-tfknb" Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.185211 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-w4cgg"] Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.202295 4899 patch_prober.go:28] interesting pod/router-default-5444994796-tfknb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 08:42:49 crc kubenswrapper[4899]: [-]has-synced failed: reason withheld Oct 03 08:42:49 
crc kubenswrapper[4899]: [+]process-running ok Oct 03 08:42:49 crc kubenswrapper[4899]: healthz check failed Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.202347 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tfknb" podUID="9c011ebc-362b-4893-9d7e-6a11a1d3e902" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.213817 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:49 crc kubenswrapper[4899]: E1003 08:42:49.217079 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:49.717064003 +0000 UTC m=+143.824548956 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.226164 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mcjm4"] Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.236881 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-8j6kq"] Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.258011 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-np5qp"] Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.258058 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324670-2j7rr"] Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.312141 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z2lfm" podStartSLOduration=123.312121279 podStartE2EDuration="2m3.312121279s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:49.243325746 +0000 UTC m=+143.350810699" watchObservedRunningTime="2025-10-03 08:42:49.312121279 +0000 UTC m=+143.419606232" Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.319055 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:49 crc kubenswrapper[4899]: E1003 08:42:49.319284 4899 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:49.819249529 +0000 UTC m=+143.926734492 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.319741 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:49 crc kubenswrapper[4899]: E1003 08:42:49.320473 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:49.820457706 +0000 UTC m=+143.927942659 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.347387 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-pmzmr"] Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.353479 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" podStartSLOduration=123.35345327900001 podStartE2EDuration="2m3.353453279s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:49.34476454 +0000 UTC m=+143.452249493" watchObservedRunningTime="2025-10-03 08:42:49.353453279 +0000 UTC m=+143.460938232" Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.354015 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-mwh8q"] Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.385759 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-rmv8r"] Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.393475 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-f45zh"] Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.403424 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-v5crb" 
event={"ID":"d7f325c2-54b2-42b3-bcab-466aa71dc831","Type":"ContainerStarted","Data":"9bf4d6f014614b31a341f4121eefa04fd6a2c0934fe6fd62853114064110bebd"} Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.404217 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-xg5bn" podStartSLOduration=123.404200361 podStartE2EDuration="2m3.404200361s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:49.40221521 +0000 UTC m=+143.509700163" watchObservedRunningTime="2025-10-03 08:42:49.404200361 +0000 UTC m=+143.511685314" Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.421879 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:49 crc kubenswrapper[4899]: E1003 08:42:49.423115 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:49.923101736 +0000 UTC m=+144.030586689 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.425931 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-csm2k" event={"ID":"1f907faf-6eea-49c9-b601-b108e328d9d6","Type":"ContainerStarted","Data":"a21ebe42a24bc030994bbf5303b2b97f1981130fcd080330b2fb71d5e1665a56"} Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.447094 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-llzvf" podStartSLOduration=123.445711237 podStartE2EDuration="2m3.445711237s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:49.440670942 +0000 UTC m=+143.548155895" watchObservedRunningTime="2025-10-03 08:42:49.445711237 +0000 UTC m=+143.553196190" Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.468071 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-k28dm" podStartSLOduration=123.46805492 podStartE2EDuration="2m3.46805492s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:49.467599565 +0000 UTC m=+143.575084518" watchObservedRunningTime="2025-10-03 08:42:49.46805492 +0000 UTC m=+143.575539873" Oct 03 08:42:49 crc 
kubenswrapper[4899]: I1003 08:42:49.502647 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kb4vv" event={"ID":"a2bfb93c-32c2-4dbe-b5a9-44f616a3e299","Type":"ContainerStarted","Data":"368cfd341ea3ee1e686879f08306af4496e8adbd53dda5ce702ba09ed5f9c4c0"} Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.502701 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kb4vv" event={"ID":"a2bfb93c-32c2-4dbe-b5a9-44f616a3e299","Type":"ContainerStarted","Data":"6914d56d0dfaa8448d8f62ca2589d549b27686fbf5ae66e2764d68e37f9b9c8b"} Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.503655 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-b8pqc" podStartSLOduration=124.503639242 podStartE2EDuration="2m4.503639242s" podCreationTimestamp="2025-10-03 08:40:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:49.502585089 +0000 UTC m=+143.610070042" watchObservedRunningTime="2025-10-03 08:42:49.503639242 +0000 UTC m=+143.611124195" Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.528363 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:49 crc kubenswrapper[4899]: E1003 08:42:49.529728 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:50.02971727 +0000 UTC m=+144.137202223 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.577104 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-brt2c" event={"ID":"03938235-9990-4806-a4c7-21d1c97cada5","Type":"ContainerStarted","Data":"b3bf004088e1241c84678449203d62a834ed927d6a49511ad013df3b89d0ae3b"} Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.615442 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2ghv5" podStartSLOduration=123.615409365 podStartE2EDuration="2m3.615409365s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:49.603581978 +0000 UTC m=+143.711066931" watchObservedRunningTime="2025-10-03 08:42:49.615409365 +0000 UTC m=+143.722894308" Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.630182 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:49 crc kubenswrapper[4899]: E1003 08:42:49.631183 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:50.131167973 +0000 UTC m=+144.238652926 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.650480 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-r5j62" podStartSLOduration=123.650461311 podStartE2EDuration="2m3.650461311s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:49.648113098 +0000 UTC m=+143.755598051" watchObservedRunningTime="2025-10-03 08:42:49.650461311 +0000 UTC m=+143.757946264" Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.696728 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-dpwsf" event={"ID":"2206e9a3-b591-4d0a-aa09-9dc5b2e54342","Type":"ContainerStarted","Data":"1fa830a12c464e25f191d30996522eecbe623f38c9dcea4e584e729878c00d52"} Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.696776 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-dpwsf" event={"ID":"2206e9a3-b591-4d0a-aa09-9dc5b2e54342","Type":"ContainerStarted","Data":"c75e2848fdadf7fe50b736cd5bfe58f53103867b2f518131b77e8beabb2dd30a"} Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.696811 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-dpwsf" Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.707824 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-nrt8l" event={"ID":"d183d7e2-7828-46a1-b80b-a1032e417265","Type":"ContainerStarted","Data":"68984142b4600dce52b35bf89840843207b0cd297e5eb602c6767d3caca72672"} Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.729458 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4cz5w" event={"ID":"d2af99a7-4436-4e42-8418-03c1d8c503ad","Type":"ContainerStarted","Data":"fcfaea6689992f0b6ca9a1ec47289d4c3f583dd133a6278cb6c2ac469407d680"} Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.730552 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4cz5w" Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.731019 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:49 crc kubenswrapper[4899]: E1003 08:42:49.731355 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-10-03 08:42:50.231343777 +0000 UTC m=+144.338828730 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.751686 4899 patch_prober.go:28] interesting pod/downloads-7954f5f757-dpwsf container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.16:8080/\": dial tcp 10.217.0.16:8080: connect: connection refused" start-of-body= Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.751748 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-dpwsf" podUID="2206e9a3-b591-4d0a-aa09-9dc5b2e54342" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.16:8080/\": dial tcp 10.217.0.16:8080: connect: connection refused" Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.773220 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t9nkq" event={"ID":"4cc4b2c6-3ef1-41a5-8f47-16f20c6b2349","Type":"ContainerStarted","Data":"4b251c5c314c76e9da8bb6e55b7ae2cfb470474e535590ec7962c7251468dda9"} Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.780147 4899 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-4cz5w container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.32:8443/healthz\": dial tcp 10.217.0.32:8443: connect: connection refused" start-of-body= Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.780221 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4cz5w" podUID="d2af99a7-4436-4e42-8418-03c1d8c503ad" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.32:8443/healthz\": dial tcp 10.217.0.32:8443: connect: connection refused" Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.788731 4899 generic.go:334] "Generic (PLEG): container finished" podID="524a0dd7-fc7e-41a5-9304-432c7b6e0624" containerID="dd3c98c7383f758a928e5f64fab5e606a4ed14bd8f81ee529585c4c711cd19b8" exitCode=0 Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.788832 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-kgddg" event={"ID":"524a0dd7-fc7e-41a5-9304-432c7b6e0624","Type":"ContainerDied","Data":"dd3c98c7383f758a928e5f64fab5e606a4ed14bd8f81ee529585c4c711cd19b8"} Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.793478 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" event={"ID":"95e7f40a-6f2f-4689-829f-6d14e941ad9e","Type":"ContainerStarted","Data":"0b8274583ee2c7bc02e4893438d460beb6e908fcf29e27afa9eaa344a92a37a9"} Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.826678 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k95tt" 
event={"ID":"7a94e3ca-3b81-4dbf-a1db-73e6b752427c","Type":"ContainerStarted","Data":"4ba7db5f7cf715e0a1b722da55997caac56745182fe3621df1f13d15cf29a3e5"} Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.827745 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-dpwsf" podStartSLOduration=123.827735673 podStartE2EDuration="2m3.827735673s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:49.82728502 +0000 UTC m=+143.934769973" watchObservedRunningTime="2025-10-03 08:42:49.827735673 +0000 UTC m=+143.935220626" Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.831666 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:49 crc kubenswrapper[4899]: E1003 08:42:49.833845 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:50.333830112 +0000 UTC m=+144.441315065 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.839163 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:49 crc kubenswrapper[4899]: E1003 08:42:49.840572 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:50.340558081 +0000 UTC m=+144.448043034 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.869833 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mcjm4" event={"ID":"ad88b591-8c16-4151-92e0-5e2fd6d352f2","Type":"ContainerStarted","Data":"05ceb97ea015b6b2f0bfb9aa4c1f9610714b5f6ccdb349778db2eaee526fa0c9"} Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.907324 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-f8jvt" event={"ID":"ae242b31-ad12-4328-8818-313458ed46aa","Type":"ContainerStarted","Data":"63ae0fc292e2e4c4d119c81796f6d7028c4b8ce9b186dc18fa51f1062da2d3c1"} Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.910387 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-brt2c" podStartSLOduration=123.910372284 podStartE2EDuration="2m3.910372284s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:49.909364843 +0000 UTC m=+144.016849796" watchObservedRunningTime="2025-10-03 08:42:49.910372284 +0000 UTC m=+144.017857237" Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.912164 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-w4cgg" event={"ID":"06b939f2-8591-4799-b59e-34cd8677aca6","Type":"ContainerStarted","Data":"24a19e58b4a87158a2828cacfab1c0af36ad6350fab367fffd9f29a8228c43c2"} Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.947507 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-scmz7" event={"ID":"d6c10a45-5a0d-4f5b-931b-0eb351ddd8b2","Type":"ContainerStarted","Data":"98a1f426742ba5fafb8ab6a0f94afdb15a0fbe3b000d3ac2b3ee8d34af19ef10"} Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.949301 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:49 crc kubenswrapper[4899]: E1003 08:42:49.950220 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:50.450204948 +0000 UTC m=+144.557689901 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.970350 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wmlxz" event={"ID":"badd5177-888b-409f-abe1-ffb6902a436b","Type":"ContainerStarted","Data":"f340197a135d9b49b39fa957cfbd85a51efd0c3ec58fb7c3406778fa27aa23ba"} Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.970805 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wmlxz" event={"ID":"badd5177-888b-409f-abe1-ffb6902a436b","Type":"ContainerStarted","Data":"a1329cec6531f4c3bcc26b720cfe69cae0270bbe9b988ec15a3bfd36d471d8f4"} Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.971397 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wmlxz" Oct 03 08:42:49 crc kubenswrapper[4899]: I1003 08:42:49.977423 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-csm2k" podStartSLOduration=5.977410911 podStartE2EDuration="5.977410911s" podCreationTimestamp="2025-10-03 08:42:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:49.976815113 +0000 UTC m=+144.084300066" watchObservedRunningTime="2025-10-03 08:42:49.977410911 +0000 UTC m=+144.084895854" Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.000328 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5mh7r" event={"ID":"6a4c32f3-acb2-45e6-99d3-4ff78be9b3b0","Type":"ContainerStarted","Data":"6fe4c219b741b27c45c71669c035ad5e8614fc118a4c5b142237982dd0c0ad51"} Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.000450 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5mh7r" Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.019399 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.029478 4899 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-wmlxz container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.25:8443/healthz\": dial tcp 10.217.0.25:8443: connect: connection refused" start-of-body= Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.029539 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wmlxz" podUID="badd5177-888b-409f-abe1-ffb6902a436b" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.25:8443/healthz\": dial tcp 10.217.0.25:8443: connect: connection refused" Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.057628 4899 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:50 crc kubenswrapper[4899]: E1003 08:42:50.059850 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:50.559820225 +0000 UTC m=+144.667305168 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.118571 4899 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-5mh7r container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:5443/healthz\": dial tcp 10.217.0.21:5443: connect: connection refused" start-of-body= Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.118850 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5mh7r" podUID="6a4c32f3-acb2-45e6-99d3-4ff78be9b3b0" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.21:5443/healthz\": dial tcp 10.217.0.21:5443: connect: connection refused" Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.156404 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-nrt8l" podStartSLOduration=124.156383647 podStartE2EDuration="2m4.156383647s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:50.019598978 +0000 UTC m=+144.127083931" watchObservedRunningTime="2025-10-03 08:42:50.156383647 +0000 UTC m=+144.263868600" Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.168448 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:50 crc kubenswrapper[4899]: E1003 08:42:50.168768 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:50.66874895 +0000 UTC m=+144.776233903 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.168922 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:50 crc kubenswrapper[4899]: E1003 08:42:50.169315 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:50.669305717 +0000 UTC m=+144.776790670 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.182154 4899 patch_prober.go:28] interesting pod/router-default-5444994796-tfknb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 08:42:50 crc kubenswrapper[4899]: [-]has-synced failed: reason withheld Oct 03 08:42:50 crc kubenswrapper[4899]: [+]process-running ok Oct 03 08:42:50 crc kubenswrapper[4899]: healthz check failed Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.186964 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tfknb" podUID="9c011ebc-362b-4893-9d7e-6a11a1d3e902" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.187057 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-v5crb" podStartSLOduration=124.187042526 podStartE2EDuration="2m4.187042526s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:50.18651441 +0000 UTC m=+144.293999363" watchObservedRunningTime="2025-10-03 08:42:50.187042526 +0000 UTC m=+144.294527469" Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.187282 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4cz5w" podStartSLOduration=124.187277954 podStartE2EDuration="2m4.187277954s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 
+0000 UTC" observedRunningTime="2025-10-03 08:42:50.104336264 +0000 UTC m=+144.211821217" watchObservedRunningTime="2025-10-03 08:42:50.187277954 +0000 UTC m=+144.294762907" Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.212056 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kb4vv" podStartSLOduration=124.212043771 podStartE2EDuration="2m4.212043771s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:50.211582266 +0000 UTC m=+144.319067219" watchObservedRunningTime="2025-10-03 08:42:50.212043771 +0000 UTC m=+144.319528724" Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.271864 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:50 crc kubenswrapper[4899]: E1003 08:42:50.272326 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:50.772309598 +0000 UTC m=+144.879794551 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.316252 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wmlxz" podStartSLOduration=124.316235529 podStartE2EDuration="2m4.316235529s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:50.265052243 +0000 UTC m=+144.372537196" watchObservedRunningTime="2025-10-03 08:42:50.316235529 +0000 UTC m=+144.423720482" Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.356187 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-scmz7" podStartSLOduration=124.356170887 podStartE2EDuration="2m4.356170887s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:50.317022294 +0000 UTC m=+144.424507247" watchObservedRunningTime="2025-10-03 08:42:50.356170887 +0000 UTC m=+144.463655850" Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.373035 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:50 crc kubenswrapper[4899]: E1003 08:42:50.373425 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:50.87341272 +0000 UTC m=+144.980897673 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.385118 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-f8jvt" podStartSLOduration=124.385099693 podStartE2EDuration="2m4.385099693s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:50.384399121 +0000 UTC m=+144.491884074" watchObservedRunningTime="2025-10-03 08:42:50.385099693 +0000 UTC m=+144.492584636" Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.421798 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5mh7r" podStartSLOduration=124.421780599 podStartE2EDuration="2m4.421780599s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:50.420364295 +0000 UTC m=+144.527849238" watchObservedRunningTime="2025-10-03 08:42:50.421780599 +0000 UTC m=+144.529265552" Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.473572 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:50 crc kubenswrapper[4899]: E1003 08:42:50.473828 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:50.973812841 +0000 UTC m=+145.081297784 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.574590 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:50 crc kubenswrapper[4899]: E1003 08:42:50.575177 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:51.075164532 +0000 UTC m=+145.182649485 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.676117 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:50 crc kubenswrapper[4899]: E1003 08:42:50.676870 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:51.176844462 +0000 UTC m=+145.284329405 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.779137 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:50 crc kubenswrapper[4899]: E1003 08:42:50.780119 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:51.280101371 +0000 UTC m=+145.387586324 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.801343 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.882460 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:50 crc kubenswrapper[4899]: E1003 08:42:50.882770 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:51.382752972 +0000 UTC m=+145.490237925 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:50 crc kubenswrapper[4899]: I1003 08:42:50.984609 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:50 crc kubenswrapper[4899]: E1003 08:42:50.984957 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:51.484941118 +0000 UTC m=+145.592426071 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.033942 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mcjm4" event={"ID":"ad88b591-8c16-4151-92e0-5e2fd6d352f2","Type":"ContainerStarted","Data":"4bafed0546f9dd41e6d5b3b4644f0081ffa8a72de3efc479a5c4055bfa02e129"} Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.034265 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mcjm4" event={"ID":"ad88b591-8c16-4151-92e0-5e2fd6d352f2","Type":"ContainerStarted","Data":"7ab0c63444f42ab2d1c2caaf3af70c6db9dc54a52f40a81ce80393898e6bc6f6"} Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.034308 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mcjm4" Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.059736 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-pmzmr" event={"ID":"9d271c75-e1f6-4cf1-8be9-2ef6e27066d6","Type":"ContainerStarted","Data":"5188a7f10aedcd3eff20f417215083a1967621b92b46b6623ea23f04db46bb1f"} Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.059785 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-pmzmr" event={"ID":"9d271c75-e1f6-4cf1-8be9-2ef6e27066d6","Type":"ContainerStarted","Data":"da4920e68dfe57b13bac1d60c08d85ad77346673984faf6913177803a0892a2d"} Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.074601 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mcjm4" podStartSLOduration=125.074582396 
podStartE2EDuration="2m5.074582396s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:51.074237855 +0000 UTC m=+145.181722798" watchObservedRunningTime="2025-10-03 08:42:51.074582396 +0000 UTC m=+145.182067349" Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.088972 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.089023 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.089042 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.092819 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k95tt" event={"ID":"7a94e3ca-3b81-4dbf-a1db-73e6b752427c","Type":"ContainerStarted","Data":"d3bc448747a92c5355b8809d49ea33d10abf6d20a3e645fe6920ef39fa680428"} Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.092873 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k95tt" event={"ID":"7a94e3ca-3b81-4dbf-a1db-73e6b752427c","Type":"ContainerStarted","Data":"efb04a7f3e89ade41d525887f97df88e13d85f15b37fe2085e0f0808f60d89e0"} Oct 03 08:42:51 crc kubenswrapper[4899]: E1003 08:42:51.093696 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:51.593671407 +0000 UTC m=+145.701156360 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.107792 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-pmzmr" podStartSLOduration=125.107770484 podStartE2EDuration="2m5.107770484s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:51.104957137 +0000 UTC m=+145.212442090" watchObservedRunningTime="2025-10-03 08:42:51.107770484 +0000 UTC m=+145.215255437" Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.139803 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k95tt" podStartSLOduration=125.139783696 podStartE2EDuration="2m5.139783696s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:51.137507055 +0000 UTC m=+145.244992008" watchObservedRunningTime="2025-10-03 08:42:51.139783696 +0000 UTC m=+145.247268649" Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.152109 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-scmz7" event={"ID":"d6c10a45-5a0d-4f5b-931b-0eb351ddd8b2","Type":"ContainerStarted","Data":"ca67e3af1644265545414372068ff2279e3c55729d069e67b94459e7513e4cc7"} Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.168671 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-nrt8l" event={"ID":"d183d7e2-7828-46a1-b80b-a1032e417265","Type":"ContainerStarted","Data":"e655b77890a6c92434a419ba40c9e8c008671f3ec90c12b2b4af790475633798"} Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.175631 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-8j6kq" event={"ID":"9f3aca40-1b42-4f22-ba89-a9b51d30aa75","Type":"ContainerStarted","Data":"dcd9f52b37cd6f67b4672219eaf738b104059e329a99971a04d494c3210309c4"} Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.175676 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-8j6kq" event={"ID":"9f3aca40-1b42-4f22-ba89-a9b51d30aa75","Type":"ContainerStarted","Data":"2efdd7b3adb3d2c48b2c9d7d572cef3c4c6a3bf944dfd033373f26458cec669e"} Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.182562 4899 patch_prober.go:28] interesting pod/router-default-5444994796-tfknb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 08:42:51 crc kubenswrapper[4899]: [-]has-synced failed: reason withheld Oct 03 08:42:51 crc kubenswrapper[4899]: [+]process-running ok Oct 03 08:42:51 crc kubenswrapper[4899]: healthz check failed Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.182934 4899 prober.go:107] "Probe failed" 
probeType="Startup" pod="openshift-ingress/router-default-5444994796-tfknb" podUID="9c011ebc-362b-4893-9d7e-6a11a1d3e902" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.184038 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mwh8q" event={"ID":"f8fd1d26-6eac-46bf-bcba-06593430d823","Type":"ContainerStarted","Data":"ce922f0afe9af79022bb5683825c728c5d0a961bd9fba6efd64ee4fa3db8c96c"} Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.184154 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mwh8q" event={"ID":"f8fd1d26-6eac-46bf-bcba-06593430d823","Type":"ContainerStarted","Data":"b4b3677b17d3dc091e2ffb88b331f2ac3f4a4d7a391cc9291d3eb5b4365d4814"} Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.201912 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.204350 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-k9nrk" event={"ID":"331490c6-4b8d-44cd-9081-5d9a7db1942a","Type":"ContainerStarted","Data":"6fa23cb1d535344f5fb84e7fe177c2187bb4620526a41c69e892a49bfa1c7758"} Oct 03 08:42:51 crc kubenswrapper[4899]: E1003 08:42:51.205065 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:51.705051458 +0000 UTC m=+145.812536411 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.206173 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mwh8q" podStartSLOduration=125.206155323 podStartE2EDuration="2m5.206155323s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:51.204174001 +0000 UTC m=+145.311658954" watchObservedRunningTime="2025-10-03 08:42:51.206155323 +0000 UTC m=+145.313640276" Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.251376 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-np5qp" event={"ID":"a4456e30-3eb3-4e1a-b22d-8888babf06a9","Type":"ContainerStarted","Data":"05db358dffce67ac8f6ccc58b2d2b68e7d4865d099c2c4c98067f30b1c1d8593"} Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.251696 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-np5qp" event={"ID":"a4456e30-3eb3-4e1a-b22d-8888babf06a9","Type":"ContainerStarted","Data":"414a450044bc41863042b259ece734bb8af36b89ce37c51e08f292d8dee2290c"} Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.252047 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-np5qp" Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.276038 4899 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-np5qp container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.42:8080/healthz\": dial tcp 10.217.0.42:8080: connect: connection refused" start-of-body= Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.276090 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-np5qp" podUID="a4456e30-3eb3-4e1a-b22d-8888babf06a9" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.42:8080/healthz\": dial tcp 10.217.0.42:8080: connect: connection refused" Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.289909 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-kgddg" event={"ID":"524a0dd7-fc7e-41a5-9304-432c7b6e0624","Type":"ContainerStarted","Data":"4cc9fd53b2216048c7d22a64899c60fc6a2dd3987d490d0aa9540b4f88a09d75"} Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.290495 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-kgddg" Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.303083 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:51 crc kubenswrapper[4899]: E1003 08:42:51.304771 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:51.804751348 +0000 UTC m=+145.912236301 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.309812 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-w4cgg" event={"ID":"06b939f2-8591-4799-b59e-34cd8677aca6","Type":"ContainerStarted","Data":"684437c905d79e0913c10b56975e6f40fa1534a7d1ba79eb12a9eef466fbff49"} Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.322385 4899 generic.go:334] "Generic (PLEG): container finished" podID="95e7f40a-6f2f-4689-829f-6d14e941ad9e" containerID="fd8ce8adea8611ef5bc8632125c18163f1f6731966562bd24378aa8ef5616dd6" exitCode=0 Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.322498 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" event={"ID":"95e7f40a-6f2f-4689-829f-6d14e941ad9e","Type":"ContainerDied","Data":"fd8ce8adea8611ef5bc8632125c18163f1f6731966562bd24378aa8ef5616dd6"} Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.324041 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-np5qp" podStartSLOduration=125.324026975 podStartE2EDuration="2m5.324026975s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:51.321341541 +0000 UTC m=+145.428826494" watchObservedRunningTime="2025-10-03 08:42:51.324026975 +0000 UTC m=+145.431511928" Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.345463 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-rmv8r" event={"ID":"eca40a31-8b9d-4825-82e2-253991771725","Type":"ContainerStarted","Data":"5158cffa8d32c8c4be55901a562d5c0f3abf535e62f8a18f5862f95296c8010a"} Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.345505 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-rmv8r" event={"ID":"eca40a31-8b9d-4825-82e2-253991771725","Type":"ContainerStarted","Data":"14fae53c52701ba1fc84c44e254bf1055e7fdb70293672c460696f87bb2872c9"} Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.367836 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324670-2j7rr" event={"ID":"264bb2e1-d946-4b30-9aa8-48cb9a9447e5","Type":"ContainerStarted","Data":"c71db21c2b595c777be240aa64dd0636db22025cd3ee053c30f92ff51a8efc4b"} Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.367903 4899 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324670-2j7rr" event={"ID":"264bb2e1-d946-4b30-9aa8-48cb9a9447e5","Type":"ContainerStarted","Data":"81c883c1c318c30266fa5b224eec3205d4756eadf2cc73dc64359d4b3177af74"} Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.386642 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-f45zh" event={"ID":"79762752-6337-410e-8bc7-c0a47aa5d773","Type":"ContainerStarted","Data":"0bc8c8fb4bf24ee8fa477188da0fb50604a1f76523f0e08faeb27135a0080003"} Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.386696 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-f45zh" event={"ID":"79762752-6337-410e-8bc7-c0a47aa5d773","Type":"ContainerStarted","Data":"74e568e8a9c4912c8632b8dc726c15ac05c6303af5a00bd908e8662bcb0d4ad2"} Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.403101 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t9nkq" event={"ID":"4cc4b2c6-3ef1-41a5-8f47-16f20c6b2349","Type":"ContainerStarted","Data":"8e14d8e153cc79e74584acd64d0a215c1ed7a3e2ddef0010aa0db612a15e831a"} Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.404423 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:51 crc kubenswrapper[4899]: E1003 08:42:51.405849 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:51.905838349 +0000 UTC m=+146.013323302 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.454203 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-f8jvt" event={"ID":"ae242b31-ad12-4328-8818-313458ed46aa","Type":"ContainerStarted","Data":"ce78ebacb9c09fda3a6a3e7ffbc247ca2a42fa760a25fc439f26196651987d86"} Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.464226 4899 patch_prober.go:28] interesting pod/downloads-7954f5f757-dpwsf container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.16:8080/\": dial tcp 10.217.0.16:8080: connect: connection refused" start-of-body= Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.464281 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-dpwsf" podUID="2206e9a3-b591-4d0a-aa09-9dc5b2e54342" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.16:8080/\": dial tcp 10.217.0.16:8080: connect: connection refused" Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.485270 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wmlxz" Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.509423 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:51 crc kubenswrapper[4899]: E1003 08:42:51.509500 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:52.00948405 +0000 UTC m=+146.116969003 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.511525 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:51 crc kubenswrapper[4899]: E1003 08:42:51.517834 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:52.017819468 +0000 UTC m=+146.125304421 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.522859 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4cz5w" Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.538289 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-kgddg" podStartSLOduration=125.538273273 podStartE2EDuration="2m5.538273273s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:51.531295736 +0000 UTC m=+145.638780689" watchObservedRunningTime="2025-10-03 08:42:51.538273273 +0000 UTC m=+145.645758226" Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.539307 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-w4cgg" podStartSLOduration=125.539299674 podStartE2EDuration="2m5.539299674s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:51.402508626 +0000 UTC m=+145.509993579" watchObservedRunningTime="2025-10-03 08:42:51.539299674 +0000 UTC m=+145.646784627" Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.613785 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:51 crc 
kubenswrapper[4899]: E1003 08:42:51.614154 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:52.114126363 +0000 UTC m=+146.221611316 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.626906 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29324670-2j7rr" podStartSLOduration=125.626865137 podStartE2EDuration="2m5.626865137s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:51.620412868 +0000 UTC m=+145.727897821" watchObservedRunningTime="2025-10-03 08:42:51.626865137 +0000 UTC m=+145.734350090" Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.715340 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:51 crc kubenswrapper[4899]: E1003 08:42:51.715671 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:52.215654439 +0000 UTC m=+146.323139392 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.744813 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-f45zh" podStartSLOduration=7.744799072 podStartE2EDuration="7.744799072s" podCreationTimestamp="2025-10-03 08:42:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:51.744774021 +0000 UTC m=+145.852258974" watchObservedRunningTime="2025-10-03 08:42:51.744799072 +0000 UTC m=+145.852284025" Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.815759 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:51 crc kubenswrapper[4899]: E1003 08:42:51.816024 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:52.315995787 +0000 UTC m=+146.423480740 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.816364 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:51 crc kubenswrapper[4899]: E1003 08:42:51.816666 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:52.316655198 +0000 UTC m=+146.424140141 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.843830 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-rmv8r" podStartSLOduration=125.84381173 podStartE2EDuration="2m5.84381173s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:51.842135797 +0000 UTC m=+145.949620760" watchObservedRunningTime="2025-10-03 08:42:51.84381173 +0000 UTC m=+145.951296673" Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.916917 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:51 crc kubenswrapper[4899]: E1003 08:42:51.917145 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:52.417129271 +0000 UTC m=+146.524614224 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:51 crc kubenswrapper[4899]: I1003 08:42:51.974870 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t9nkq" podStartSLOduration=125.974851519 podStartE2EDuration="2m5.974851519s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:51.958510064 +0000 UTC m=+146.065995017" watchObservedRunningTime="2025-10-03 08:42:51.974851519 +0000 UTC m=+146.082336472" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.018147 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:52 crc kubenswrapper[4899]: E1003 08:42:52.018550 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:52.518533343 +0000 UTC m=+146.626018296 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.118964 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:52 crc kubenswrapper[4899]: E1003 08:42:52.119141 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:52.619110879 +0000 UTC m=+146.726595832 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.119437 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:52 crc kubenswrapper[4899]: E1003 08:42:52.119786 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:52.6197721 +0000 UTC m=+146.727257053 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.180976 4899 patch_prober.go:28] interesting pod/router-default-5444994796-tfknb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 08:42:52 crc kubenswrapper[4899]: [-]has-synced failed: reason withheld Oct 03 08:42:52 crc kubenswrapper[4899]: [+]process-running ok Oct 03 08:42:52 crc kubenswrapper[4899]: healthz check failed Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.181043 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tfknb" podUID="9c011ebc-362b-4893-9d7e-6a11a1d3e902" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.220051 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:52 crc kubenswrapper[4899]: E1003 08:42:52.220226 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:52.720193331 +0000 UTC m=+146.827678284 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.220349 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:52 crc kubenswrapper[4899]: E1003 08:42:52.220712 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:52.720702527 +0000 UTC m=+146.828187550 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.321073 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:52 crc kubenswrapper[4899]: E1003 08:42:52.321317 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:52.821284283 +0000 UTC m=+146.928769246 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.394494 4899 patch_prober.go:28] interesting pod/apiserver-76f77b778f-v5crb container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Oct 03 08:42:52 crc kubenswrapper[4899]: [+]log ok Oct 03 08:42:52 crc kubenswrapper[4899]: [+]etcd ok Oct 03 08:42:52 crc kubenswrapper[4899]: [+]poststarthook/start-apiserver-admission-initializer ok Oct 03 08:42:52 crc kubenswrapper[4899]: [+]poststarthook/generic-apiserver-start-informers ok Oct 03 08:42:52 crc kubenswrapper[4899]: [+]poststarthook/max-in-flight-filter ok Oct 03 08:42:52 crc kubenswrapper[4899]: [+]poststarthook/storage-object-count-tracker-hook ok Oct 03 08:42:52 crc kubenswrapper[4899]: [+]poststarthook/image.openshift.io-apiserver-caches ok Oct 03 08:42:52 crc kubenswrapper[4899]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Oct 03 08:42:52 crc kubenswrapper[4899]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Oct 03 08:42:52 crc kubenswrapper[4899]: [+]poststarthook/project.openshift.io-projectcache ok Oct 03 08:42:52 crc kubenswrapper[4899]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Oct 03 08:42:52 crc kubenswrapper[4899]: [+]poststarthook/openshift.io-startinformers ok Oct 03 08:42:52 crc kubenswrapper[4899]: [+]poststarthook/openshift.io-restmapperupdater ok Oct 03 08:42:52 crc kubenswrapper[4899]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Oct 03 08:42:52 crc kubenswrapper[4899]: livez check failed Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.394584 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-v5crb" podUID="d7f325c2-54b2-42b3-bcab-466aa71dc831" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.422511 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:52 crc kubenswrapper[4899]: E1003 08:42:52.422903 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:52.922871601 +0000 UTC m=+147.030356554 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.458360 4899 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-5mh7r container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.458424 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5mh7r" podUID="6a4c32f3-acb2-45e6-99d3-4ff78be9b3b0" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.21:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.466774 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-k9nrk" event={"ID":"331490c6-4b8d-44cd-9081-5d9a7db1942a","Type":"ContainerStarted","Data":"a7a700ebce96175937455d019ab1887fa3bb6622f39245bb17beb87cfc845265"} Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.469036 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-w4cgg" event={"ID":"06b939f2-8591-4799-b59e-34cd8677aca6","Type":"ContainerStarted","Data":"90b9f70027e9c400bf407e2253e315a2a44302de3c807615dddc2c9b62b2fad5"} Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.470724 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" event={"ID":"95e7f40a-6f2f-4689-829f-6d14e941ad9e","Type":"ContainerStarted","Data":"4d2b93dd49cd2c8ea285233e44d2144d09e13b303cd8616b600844188feadaef"} Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.472536 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-8j6kq" event={"ID":"9f3aca40-1b42-4f22-ba89-a9b51d30aa75","Type":"ContainerStarted","Data":"e4c1183b4de1a73a911b8f282bfc358834bea22b3dcf039127c95d5f858e6f96"} Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.473972 4899 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-np5qp container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.42:8080/healthz\": dial tcp 10.217.0.42:8080: connect: connection refused" start-of-body= Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.474006 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-np5qp" podUID="a4456e30-3eb3-4e1a-b22d-8888babf06a9" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.42:8080/healthz\": dial tcp 10.217.0.42:8080: connect: connection refused" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.493733 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 
03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.493776 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.495670 4899 patch_prober.go:28] interesting pod/apiserver-7bbb656c7d-5hl5h container/oauth-apiserver namespace/openshift-oauth-apiserver: Startup probe status=failure output="Get \"https://10.217.0.24:8443/livez\": dial tcp 10.217.0.24:8443: connect: connection refused" start-of-body= Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.495809 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" podUID="95e7f40a-6f2f-4689-829f-6d14e941ad9e" containerName="oauth-apiserver" probeResult="failure" output="Get \"https://10.217.0.24:8443/livez\": dial tcp 10.217.0.24:8443: connect: connection refused" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.524291 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:52 crc kubenswrapper[4899]: E1003 08:42:52.524445 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:53.024423617 +0000 UTC m=+147.131908570 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.524976 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:52 crc kubenswrapper[4899]: E1003 08:42:52.525498 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:53.0254832 +0000 UTC m=+147.132968153 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.527741 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" podStartSLOduration=126.527695758 podStartE2EDuration="2m6.527695758s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:52.513038345 +0000 UTC m=+146.620523298" watchObservedRunningTime="2025-10-03 08:42:52.527695758 +0000 UTC m=+146.635180711" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.533469 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jtp4v"] Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.534418 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jtp4v" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.538082 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.546207 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-8j6kq" podStartSLOduration=8.546191942 podStartE2EDuration="8.546191942s" podCreationTimestamp="2025-10-03 08:42:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:52.543522669 +0000 UTC m=+146.651007622" watchObservedRunningTime="2025-10-03 08:42:52.546191942 +0000 UTC m=+146.653676895" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.551284 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jtp4v"] Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.626864 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:52 crc kubenswrapper[4899]: E1003 08:42:52.627000 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:53.126977855 +0000 UTC m=+147.234462808 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.627115 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-775ml\" (UniqueName: \"kubernetes.io/projected/49764d57-9b3d-4097-a2f4-08a363c6a25f-kube-api-access-775ml\") pod \"community-operators-jtp4v\" (UID: \"49764d57-9b3d-4097-a2f4-08a363c6a25f\") " pod="openshift-marketplace/community-operators-jtp4v" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.627180 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49764d57-9b3d-4097-a2f4-08a363c6a25f-utilities\") pod \"community-operators-jtp4v\" (UID: \"49764d57-9b3d-4097-a2f4-08a363c6a25f\") " pod="openshift-marketplace/community-operators-jtp4v" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.627217 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49764d57-9b3d-4097-a2f4-08a363c6a25f-catalog-content\") pod \"community-operators-jtp4v\" (UID: \"49764d57-9b3d-4097-a2f4-08a363c6a25f\") " pod="openshift-marketplace/community-operators-jtp4v" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.627251 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:52 crc kubenswrapper[4899]: E1003 08:42:52.627519 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:53.127507261 +0000 UTC m=+147.234992214 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.703381 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5xplz"] Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.704562 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5xplz" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.708613 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.718403 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5xplz"] Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.728427 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:52 crc kubenswrapper[4899]: E1003 08:42:52.728677 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:53.228643025 +0000 UTC m=+147.336127978 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.728724 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-775ml\" (UniqueName: \"kubernetes.io/projected/49764d57-9b3d-4097-a2f4-08a363c6a25f-kube-api-access-775ml\") pod \"community-operators-jtp4v\" (UID: \"49764d57-9b3d-4097-a2f4-08a363c6a25f\") " pod="openshift-marketplace/community-operators-jtp4v" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.728767 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49764d57-9b3d-4097-a2f4-08a363c6a25f-utilities\") pod \"community-operators-jtp4v\" (UID: \"49764d57-9b3d-4097-a2f4-08a363c6a25f\") " pod="openshift-marketplace/community-operators-jtp4v" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.728792 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49764d57-9b3d-4097-a2f4-08a363c6a25f-catalog-content\") pod \"community-operators-jtp4v\" (UID: \"49764d57-9b3d-4097-a2f4-08a363c6a25f\") " pod="openshift-marketplace/community-operators-jtp4v" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.728818 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85fb2d07-4784-45b8-952e-2b12d61ea024-utilities\") pod \"certified-operators-5xplz\" (UID: \"85fb2d07-4784-45b8-952e-2b12d61ea024\") " pod="openshift-marketplace/certified-operators-5xplz" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.728851 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.728924 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pr49d\" (UniqueName: \"kubernetes.io/projected/85fb2d07-4784-45b8-952e-2b12d61ea024-kube-api-access-pr49d\") pod \"certified-operators-5xplz\" (UID: \"85fb2d07-4784-45b8-952e-2b12d61ea024\") " pod="openshift-marketplace/certified-operators-5xplz" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.728945 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85fb2d07-4784-45b8-952e-2b12d61ea024-catalog-content\") pod \"certified-operators-5xplz\" (UID: \"85fb2d07-4784-45b8-952e-2b12d61ea024\") " pod="openshift-marketplace/certified-operators-5xplz" Oct 03 08:42:52 crc kubenswrapper[4899]: E1003 08:42:52.729207 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:53.229195751 +0000 UTC m=+147.336680704 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.729236 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49764d57-9b3d-4097-a2f4-08a363c6a25f-catalog-content\") pod \"community-operators-jtp4v\" (UID: \"49764d57-9b3d-4097-a2f4-08a363c6a25f\") " pod="openshift-marketplace/community-operators-jtp4v" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.729287 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49764d57-9b3d-4097-a2f4-08a363c6a25f-utilities\") pod \"community-operators-jtp4v\" (UID: \"49764d57-9b3d-4097-a2f4-08a363c6a25f\") " pod="openshift-marketplace/community-operators-jtp4v" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.781156 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-775ml\" (UniqueName: \"kubernetes.io/projected/49764d57-9b3d-4097-a2f4-08a363c6a25f-kube-api-access-775ml\") pod \"community-operators-jtp4v\" (UID: \"49764d57-9b3d-4097-a2f4-08a363c6a25f\") " pod="openshift-marketplace/community-operators-jtp4v" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.829573 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:52 crc kubenswrapper[4899]: E1003 08:42:52.829744 4899 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:53.329715056 +0000 UTC m=+147.437200009 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.830112 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85fb2d07-4784-45b8-952e-2b12d61ea024-utilities\") pod \"certified-operators-5xplz\" (UID: \"85fb2d07-4784-45b8-952e-2b12d61ea024\") " pod="openshift-marketplace/certified-operators-5xplz" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.830237 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.830339 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pr49d\" (UniqueName: \"kubernetes.io/projected/85fb2d07-4784-45b8-952e-2b12d61ea024-kube-api-access-pr49d\") pod \"certified-operators-5xplz\" (UID: \"85fb2d07-4784-45b8-952e-2b12d61ea024\") " pod="openshift-marketplace/certified-operators-5xplz" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.830451 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85fb2d07-4784-45b8-952e-2b12d61ea024-catalog-content\") pod \"certified-operators-5xplz\" (UID: \"85fb2d07-4784-45b8-952e-2b12d61ea024\") " pod="openshift-marketplace/certified-operators-5xplz" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.830545 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85fb2d07-4784-45b8-952e-2b12d61ea024-utilities\") pod \"certified-operators-5xplz\" (UID: \"85fb2d07-4784-45b8-952e-2b12d61ea024\") " pod="openshift-marketplace/certified-operators-5xplz" Oct 03 08:42:52 crc kubenswrapper[4899]: E1003 08:42:52.830574 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:53.330559652 +0000 UTC m=+147.438044605 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.830824 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85fb2d07-4784-45b8-952e-2b12d61ea024-catalog-content\") pod \"certified-operators-5xplz\" (UID: \"85fb2d07-4784-45b8-952e-2b12d61ea024\") " pod="openshift-marketplace/certified-operators-5xplz" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.852954 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jtp4v" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.854862 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pr49d\" (UniqueName: \"kubernetes.io/projected/85fb2d07-4784-45b8-952e-2b12d61ea024-kube-api-access-pr49d\") pod \"certified-operators-5xplz\" (UID: \"85fb2d07-4784-45b8-952e-2b12d61ea024\") " pod="openshift-marketplace/certified-operators-5xplz" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.928477 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-n9vg4"] Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.929420 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n9vg4" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.934555 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:52 crc kubenswrapper[4899]: E1003 08:42:52.934695 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:53.434674439 +0000 UTC m=+147.542159392 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.934922 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.934979 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vht5n\" (UniqueName: \"kubernetes.io/projected/ea22580f-8ebf-46dc-adc0-c17174f4a096-kube-api-access-vht5n\") pod \"community-operators-n9vg4\" (UID: \"ea22580f-8ebf-46dc-adc0-c17174f4a096\") " pod="openshift-marketplace/community-operators-n9vg4" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.935006 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea22580f-8ebf-46dc-adc0-c17174f4a096-catalog-content\") pod \"community-operators-n9vg4\" (UID: \"ea22580f-8ebf-46dc-adc0-c17174f4a096\") " pod="openshift-marketplace/community-operators-n9vg4" Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.935063 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea22580f-8ebf-46dc-adc0-c17174f4a096-utilities\") pod \"community-operators-n9vg4\" (UID: \"ea22580f-8ebf-46dc-adc0-c17174f4a096\") " pod="openshift-marketplace/community-operators-n9vg4" Oct 03 08:42:52 crc kubenswrapper[4899]: E1003 08:42:52.935253 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:53.435236126 +0000 UTC m=+147.542721079 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:52 crc kubenswrapper[4899]: I1003 08:42:52.966334 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n9vg4"] Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.018484 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5xplz" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.037166 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.037592 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vht5n\" (UniqueName: \"kubernetes.io/projected/ea22580f-8ebf-46dc-adc0-c17174f4a096-kube-api-access-vht5n\") pod \"community-operators-n9vg4\" (UID: \"ea22580f-8ebf-46dc-adc0-c17174f4a096\") " pod="openshift-marketplace/community-operators-n9vg4" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.037626 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea22580f-8ebf-46dc-adc0-c17174f4a096-catalog-content\") pod \"community-operators-n9vg4\" (UID: \"ea22580f-8ebf-46dc-adc0-c17174f4a096\") " pod="openshift-marketplace/community-operators-n9vg4" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.037680 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea22580f-8ebf-46dc-adc0-c17174f4a096-utilities\") pod \"community-operators-n9vg4\" (UID: \"ea22580f-8ebf-46dc-adc0-c17174f4a096\") " pod="openshift-marketplace/community-operators-n9vg4" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.038624 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea22580f-8ebf-46dc-adc0-c17174f4a096-utilities\") pod \"community-operators-n9vg4\" (UID: \"ea22580f-8ebf-46dc-adc0-c17174f4a096\") " pod="openshift-marketplace/community-operators-n9vg4" Oct 03 08:42:53 crc kubenswrapper[4899]: E1003 08:42:53.038750 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:53.538730282 +0000 UTC m=+147.646215235 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.039371 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea22580f-8ebf-46dc-adc0-c17174f4a096-catalog-content\") pod \"community-operators-n9vg4\" (UID: \"ea22580f-8ebf-46dc-adc0-c17174f4a096\") " pod="openshift-marketplace/community-operators-n9vg4" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.067790 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vht5n\" (UniqueName: \"kubernetes.io/projected/ea22580f-8ebf-46dc-adc0-c17174f4a096-kube-api-access-vht5n\") pod \"community-operators-n9vg4\" (UID: \"ea22580f-8ebf-46dc-adc0-c17174f4a096\") " pod="openshift-marketplace/community-operators-n9vg4" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.107543 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9zxwg"] Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.108544 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9zxwg" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.111727 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9zxwg"] Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.139188 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.139230 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5013bd4-157b-4e1b-b1e7-df61922402cc-catalog-content\") pod \"certified-operators-9zxwg\" (UID: \"d5013bd4-157b-4e1b-b1e7-df61922402cc\") " pod="openshift-marketplace/certified-operators-9zxwg" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.139252 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5013bd4-157b-4e1b-b1e7-df61922402cc-utilities\") pod \"certified-operators-9zxwg\" (UID: \"d5013bd4-157b-4e1b-b1e7-df61922402cc\") " pod="openshift-marketplace/certified-operators-9zxwg" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.139331 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rb68l\" (UniqueName: \"kubernetes.io/projected/d5013bd4-157b-4e1b-b1e7-df61922402cc-kube-api-access-rb68l\") pod \"certified-operators-9zxwg\" (UID: \"d5013bd4-157b-4e1b-b1e7-df61922402cc\") " pod="openshift-marketplace/certified-operators-9zxwg" Oct 03 08:42:53 crc kubenswrapper[4899]: E1003 08:42:53.139579 4899 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:53.639567067 +0000 UTC m=+147.747052020 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.181231 4899 patch_prober.go:28] interesting pod/router-default-5444994796-tfknb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 08:42:53 crc kubenswrapper[4899]: [-]has-synced failed: reason withheld Oct 03 08:42:53 crc kubenswrapper[4899]: [+]process-running ok Oct 03 08:42:53 crc kubenswrapper[4899]: healthz check failed Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.181284 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tfknb" podUID="9c011ebc-362b-4893-9d7e-6a11a1d3e902" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.239969 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.240219 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rb68l\" (UniqueName: \"kubernetes.io/projected/d5013bd4-157b-4e1b-b1e7-df61922402cc-kube-api-access-rb68l\") pod \"certified-operators-9zxwg\" (UID: \"d5013bd4-157b-4e1b-b1e7-df61922402cc\") " pod="openshift-marketplace/certified-operators-9zxwg" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.240290 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5013bd4-157b-4e1b-b1e7-df61922402cc-catalog-content\") pod \"certified-operators-9zxwg\" (UID: \"d5013bd4-157b-4e1b-b1e7-df61922402cc\") " pod="openshift-marketplace/certified-operators-9zxwg" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.240320 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5013bd4-157b-4e1b-b1e7-df61922402cc-utilities\") pod \"certified-operators-9zxwg\" (UID: \"d5013bd4-157b-4e1b-b1e7-df61922402cc\") " pod="openshift-marketplace/certified-operators-9zxwg" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.240858 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5013bd4-157b-4e1b-b1e7-df61922402cc-utilities\") pod \"certified-operators-9zxwg\" (UID: \"d5013bd4-157b-4e1b-b1e7-df61922402cc\") " pod="openshift-marketplace/certified-operators-9zxwg" Oct 03 
08:42:53 crc kubenswrapper[4899]: E1003 08:42:53.241586 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:53.741558727 +0000 UTC m=+147.849043690 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.241923 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5013bd4-157b-4e1b-b1e7-df61922402cc-catalog-content\") pod \"certified-operators-9zxwg\" (UID: \"d5013bd4-157b-4e1b-b1e7-df61922402cc\") " pod="openshift-marketplace/certified-operators-9zxwg" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.242459 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jtp4v"] Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.256148 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n9vg4" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.275950 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rb68l\" (UniqueName: \"kubernetes.io/projected/d5013bd4-157b-4e1b-b1e7-df61922402cc-kube-api-access-rb68l\") pod \"certified-operators-9zxwg\" (UID: \"d5013bd4-157b-4e1b-b1e7-df61922402cc\") " pod="openshift-marketplace/certified-operators-9zxwg" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.342102 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.342139 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.342178 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.342234 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:53 crc kubenswrapper[4899]: E1003 08:42:53.342510 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:53.842497784 +0000 UTC m=+147.949982727 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.343515 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.347882 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.348454 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.386382 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-kgddg" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.402252 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5xplz"] Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.429239 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9zxwg" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.440447 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.444309 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.444560 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:42:53 crc kubenswrapper[4899]: E1003 08:42:53.444710 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:53.9446813 +0000 UTC m=+148.052166253 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.449470 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.451496 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.459793 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.486275 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5xplz" event={"ID":"85fb2d07-4784-45b8-952e-2b12d61ea024","Type":"ContainerStarted","Data":"993b67686c8e101800c62af21ea4829014eb07b6f974ea9fba3537ab24ff55cd"} Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.490283 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jtp4v" event={"ID":"49764d57-9b3d-4097-a2f4-08a363c6a25f","Type":"ContainerStarted","Data":"acace7f15f38368b237a8025c6ea0772fe22591fd4d5c205ecc4ef1cd771bc42"} Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.490907 4899 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-np5qp container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.42:8080/healthz\": dial tcp 10.217.0.42:8080: connect: connection refused" start-of-body= Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.490950 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-np5qp" podUID="a4456e30-3eb3-4e1a-b22d-8888babf06a9" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.42:8080/healthz\": dial tcp 10.217.0.42:8080: connect: connection refused" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.491053 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-8j6kq" Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.546006 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:53 crc kubenswrapper[4899]: E1003 08:42:53.547784 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:54.047772515 +0000 UTC m=+148.155257468 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.647688 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:53 crc kubenswrapper[4899]: E1003 08:42:53.648284 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:54.148267639 +0000 UTC m=+148.255752592 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.749582 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:53 crc kubenswrapper[4899]: E1003 08:42:53.750026 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:54.25000873 +0000 UTC m=+148.357493683 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.829049 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n9vg4"] Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.852155 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:53 crc kubenswrapper[4899]: E1003 08:42:53.852695 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:54.352677262 +0000 UTC m=+148.460162215 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:53 crc kubenswrapper[4899]: W1003 08:42:53.890449 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-3726450f5cdb4192c563e889107e4436d571e96f5b562ed15ef3796b5223bbb5 WatchSource:0}: Error finding container 3726450f5cdb4192c563e889107e4436d571e96f5b562ed15ef3796b5223bbb5: Status 404 returned error can't find the container with id 3726450f5cdb4192c563e889107e4436d571e96f5b562ed15ef3796b5223bbb5 Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.935777 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9zxwg"] Oct 03 08:42:53 crc kubenswrapper[4899]: I1003 08:42:53.955655 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:53 crc kubenswrapper[4899]: E1003 08:42:53.957177 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:54.457156498 +0000 UTC m=+148.564641451 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:53 crc kubenswrapper[4899]: W1003 08:42:53.966626 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd5013bd4_157b_4e1b_b1e7_df61922402cc.slice/crio-2577889c44cc7511e5b456057423e89e37eb8eb6ad18ff61b156772fd62c9a3b WatchSource:0}: Error finding container 2577889c44cc7511e5b456057423e89e37eb8eb6ad18ff61b156772fd62c9a3b: Status 404 returned error can't find the container with id 2577889c44cc7511e5b456057423e89e37eb8eb6ad18ff61b156772fd62c9a3b Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.024533 4899 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.056465 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:54 crc kubenswrapper[4899]: E1003 08:42:54.056791 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:54.556774505 +0000 UTC m=+148.664259458 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.160635 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:54 crc kubenswrapper[4899]: E1003 08:42:54.161312 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:54.661300244 +0000 UTC m=+148.768785197 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.176832 4899 patch_prober.go:28] interesting pod/router-default-5444994796-tfknb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 08:42:54 crc kubenswrapper[4899]: [-]has-synced failed: reason withheld Oct 03 08:42:54 crc kubenswrapper[4899]: [+]process-running ok Oct 03 08:42:54 crc kubenswrapper[4899]: healthz check failed Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.176923 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tfknb" podUID="9c011ebc-362b-4893-9d7e-6a11a1d3e902" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.262109 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:54 crc kubenswrapper[4899]: E1003 08:42:54.262408 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:54.762393326 +0000 UTC m=+148.869878279 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.363664 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:54 crc kubenswrapper[4899]: E1003 08:42:54.364012 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:54.863996494 +0000 UTC m=+148.971481447 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.464445 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:54 crc kubenswrapper[4899]: E1003 08:42:54.464725 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:54.964709855 +0000 UTC m=+149.072194808 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.495851 4899 generic.go:334] "Generic (PLEG): container finished" podID="85fb2d07-4784-45b8-952e-2b12d61ea024" containerID="ffb9ed9642f199e0f72618a43fd861d41ffbd270bbb93ab04116fd28be218963" exitCode=0 Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.495919 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5xplz" event={"ID":"85fb2d07-4784-45b8-952e-2b12d61ea024","Type":"ContainerDied","Data":"ffb9ed9642f199e0f72618a43fd861d41ffbd270bbb93ab04116fd28be218963"} Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.497988 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jtp4v" event={"ID":"49764d57-9b3d-4097-a2f4-08a363c6a25f","Type":"ContainerDied","Data":"da827364a5ac1fe0474ef66174ac298f0beac493eebbf6444c87ed093a2a53f0"} Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.497937 4899 generic.go:334] "Generic (PLEG): container finished" podID="49764d57-9b3d-4097-a2f4-08a363c6a25f" containerID="da827364a5ac1fe0474ef66174ac298f0beac493eebbf6444c87ed093a2a53f0" exitCode=0 Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.498037 4899 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.500392 4899 generic.go:334] "Generic (PLEG): container finished" podID="d5013bd4-157b-4e1b-b1e7-df61922402cc" containerID="94190176e53912947674ea3df2e1cb5d855caa22b75380923ec4473c72235a32" exitCode=0 Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.500488 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9zxwg" 
event={"ID":"d5013bd4-157b-4e1b-b1e7-df61922402cc","Type":"ContainerDied","Data":"94190176e53912947674ea3df2e1cb5d855caa22b75380923ec4473c72235a32"} Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.500555 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9zxwg" event={"ID":"d5013bd4-157b-4e1b-b1e7-df61922402cc","Type":"ContainerStarted","Data":"2577889c44cc7511e5b456057423e89e37eb8eb6ad18ff61b156772fd62c9a3b"} Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.505454 4899 generic.go:334] "Generic (PLEG): container finished" podID="ea22580f-8ebf-46dc-adc0-c17174f4a096" containerID="7fcb56b1f35756dbe1c702e923e168abd18461fb7607404a44ccc49fb889bbf0" exitCode=0 Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.505559 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9vg4" event={"ID":"ea22580f-8ebf-46dc-adc0-c17174f4a096","Type":"ContainerDied","Data":"7fcb56b1f35756dbe1c702e923e168abd18461fb7607404a44ccc49fb889bbf0"} Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.505611 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9vg4" event={"ID":"ea22580f-8ebf-46dc-adc0-c17174f4a096","Type":"ContainerStarted","Data":"c55dc1d615753824e903f57104c46a9221bf368f62b96ca71ef00dc67fb9ac51"} Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.512317 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-k9nrk" event={"ID":"331490c6-4b8d-44cd-9081-5d9a7db1942a","Type":"ContainerStarted","Data":"344aa80bea5c4ebb2accccc81c0428aa08a119e62ffb7efd0215e67ee1c73cc8"} Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.512357 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-k9nrk" event={"ID":"331490c6-4b8d-44cd-9081-5d9a7db1942a","Type":"ContainerStarted","Data":"59b724017ded5fc70422d8ac3e2f9fb6bf8d14b70d0253c496e117979934538e"} Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.512370 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-k9nrk" event={"ID":"331490c6-4b8d-44cd-9081-5d9a7db1942a","Type":"ContainerStarted","Data":"16b54b26e74ad1ae37353b04162e6e7d192558702e7b523c6925795bdef0fbaf"} Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.514792 4899 generic.go:334] "Generic (PLEG): container finished" podID="264bb2e1-d946-4b30-9aa8-48cb9a9447e5" containerID="c71db21c2b595c777be240aa64dd0636db22025cd3ee053c30f92ff51a8efc4b" exitCode=0 Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.515148 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324670-2j7rr" event={"ID":"264bb2e1-d946-4b30-9aa8-48cb9a9447e5","Type":"ContainerDied","Data":"c71db21c2b595c777be240aa64dd0636db22025cd3ee053c30f92ff51a8efc4b"} Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.518823 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"43e9c48f1ff67a63f7100ccb22aa92300b01ecbfee209a5e6acee66a941b340c"} Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.518866 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" 
event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"0bbad48841e67100d7787e9901944ccce6e912ed18e3fccacfe947857569620c"} Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.522413 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"08ce5440a2c22580232105d0a7f2b6dde3937db613df6b3fb0e02b2699275f95"} Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.522534 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"3726450f5cdb4192c563e889107e4436d571e96f5b562ed15ef3796b5223bbb5"} Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.523809 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"8cb193bb4dcb6447ec8a7f66c526e1fe7da53aa2e881afb4e4d9b8aef0a83df5"} Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.523882 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"6649e5fb3326c981e20f2f79041f15581bfdb2495f2517bcfaa39e71c9ad31d7"} Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.566084 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:54 crc kubenswrapper[4899]: E1003 08:42:54.566386 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:55.066373945 +0000 UTC m=+149.173858898 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.585500 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-k9nrk" podStartSLOduration=10.585482467 podStartE2EDuration="10.585482467s" podCreationTimestamp="2025-10-03 08:42:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:54.58431995 +0000 UTC m=+148.691804903" watchObservedRunningTime="2025-10-03 08:42:54.585482467 +0000 UTC m=+148.692967420" Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.666742 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:54 crc kubenswrapper[4899]: E1003 08:42:54.666819 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 08:42:55.166802536 +0000 UTC m=+149.274287489 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.666994 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:54 crc kubenswrapper[4899]: E1003 08:42:54.667313 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 08:42:55.167302752 +0000 UTC m=+149.274787705 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l5vs7" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.695481 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4lpnw"] Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.696711 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4lpnw" Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.698763 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.705765 4899 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-10-03T08:42:54.024559377Z","Handler":null,"Name":""} Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.707160 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4lpnw"] Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.718993 4899 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.719027 4899 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.767814 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.768050 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad6ce9b2-60bf-492e-b583-9147a171e7bd-catalog-content\") pod \"redhat-marketplace-4lpnw\" (UID: \"ad6ce9b2-60bf-492e-b583-9147a171e7bd\") " pod="openshift-marketplace/redhat-marketplace-4lpnw" Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.768074 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad6ce9b2-60bf-492e-b583-9147a171e7bd-utilities\") pod \"redhat-marketplace-4lpnw\" (UID: \"ad6ce9b2-60bf-492e-b583-9147a171e7bd\") " pod="openshift-marketplace/redhat-marketplace-4lpnw" Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.768133 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7p6pj\" (UniqueName: \"kubernetes.io/projected/ad6ce9b2-60bf-492e-b583-9147a171e7bd-kube-api-access-7p6pj\") pod \"redhat-marketplace-4lpnw\" (UID: \"ad6ce9b2-60bf-492e-b583-9147a171e7bd\") " 
pod="openshift-marketplace/redhat-marketplace-4lpnw" Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.772464 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.868809 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7p6pj\" (UniqueName: \"kubernetes.io/projected/ad6ce9b2-60bf-492e-b583-9147a171e7bd-kube-api-access-7p6pj\") pod \"redhat-marketplace-4lpnw\" (UID: \"ad6ce9b2-60bf-492e-b583-9147a171e7bd\") " pod="openshift-marketplace/redhat-marketplace-4lpnw" Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.869063 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.869144 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad6ce9b2-60bf-492e-b583-9147a171e7bd-catalog-content\") pod \"redhat-marketplace-4lpnw\" (UID: \"ad6ce9b2-60bf-492e-b583-9147a171e7bd\") " pod="openshift-marketplace/redhat-marketplace-4lpnw" Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.869247 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad6ce9b2-60bf-492e-b583-9147a171e7bd-utilities\") pod \"redhat-marketplace-4lpnw\" (UID: \"ad6ce9b2-60bf-492e-b583-9147a171e7bd\") " pod="openshift-marketplace/redhat-marketplace-4lpnw" Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.869609 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad6ce9b2-60bf-492e-b583-9147a171e7bd-utilities\") pod \"redhat-marketplace-4lpnw\" (UID: \"ad6ce9b2-60bf-492e-b583-9147a171e7bd\") " pod="openshift-marketplace/redhat-marketplace-4lpnw" Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.869823 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad6ce9b2-60bf-492e-b583-9147a171e7bd-catalog-content\") pod \"redhat-marketplace-4lpnw\" (UID: \"ad6ce9b2-60bf-492e-b583-9147a171e7bd\") " pod="openshift-marketplace/redhat-marketplace-4lpnw" Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.872082 4899 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.872107 4899 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.889051 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7p6pj\" (UniqueName: \"kubernetes.io/projected/ad6ce9b2-60bf-492e-b583-9147a171e7bd-kube-api-access-7p6pj\") pod \"redhat-marketplace-4lpnw\" (UID: \"ad6ce9b2-60bf-492e-b583-9147a171e7bd\") " pod="openshift-marketplace/redhat-marketplace-4lpnw" Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.898363 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l5vs7\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:54 crc kubenswrapper[4899]: I1003 08:42:54.966970 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.011500 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4lpnw" Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.099801 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-b7jmv"] Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.101603 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b7jmv" Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.134597 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-b7jmv"] Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.173748 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81f9d59c-b58a-4c3a-a9a8-7377c5d17273-utilities\") pod \"redhat-marketplace-b7jmv\" (UID: \"81f9d59c-b58a-4c3a-a9a8-7377c5d17273\") " pod="openshift-marketplace/redhat-marketplace-b7jmv" Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.173828 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81f9d59c-b58a-4c3a-a9a8-7377c5d17273-catalog-content\") pod \"redhat-marketplace-b7jmv\" (UID: \"81f9d59c-b58a-4c3a-a9a8-7377c5d17273\") " pod="openshift-marketplace/redhat-marketplace-b7jmv" Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.173854 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9lb6\" (UniqueName: \"kubernetes.io/projected/81f9d59c-b58a-4c3a-a9a8-7377c5d17273-kube-api-access-z9lb6\") pod \"redhat-marketplace-b7jmv\" (UID: \"81f9d59c-b58a-4c3a-a9a8-7377c5d17273\") " pod="openshift-marketplace/redhat-marketplace-b7jmv" Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.177418 4899 patch_prober.go:28] interesting pod/router-default-5444994796-tfknb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 08:42:55 crc kubenswrapper[4899]: [-]has-synced failed: reason withheld Oct 03 08:42:55 crc kubenswrapper[4899]: [+]process-running ok Oct 03 08:42:55 crc kubenswrapper[4899]: healthz check failed Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.177504 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tfknb" podUID="9c011ebc-362b-4893-9d7e-6a11a1d3e902" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.286429 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81f9d59c-b58a-4c3a-a9a8-7377c5d17273-utilities\") pod \"redhat-marketplace-b7jmv\" (UID: \"81f9d59c-b58a-4c3a-a9a8-7377c5d17273\") " pod="openshift-marketplace/redhat-marketplace-b7jmv" Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.286602 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81f9d59c-b58a-4c3a-a9a8-7377c5d17273-catalog-content\") pod \"redhat-marketplace-b7jmv\" (UID: \"81f9d59c-b58a-4c3a-a9a8-7377c5d17273\") " pod="openshift-marketplace/redhat-marketplace-b7jmv" Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.286645 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9lb6\" (UniqueName: \"kubernetes.io/projected/81f9d59c-b58a-4c3a-a9a8-7377c5d17273-kube-api-access-z9lb6\") pod \"redhat-marketplace-b7jmv\" (UID: \"81f9d59c-b58a-4c3a-a9a8-7377c5d17273\") " pod="openshift-marketplace/redhat-marketplace-b7jmv" Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 
08:42:55.287536 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81f9d59c-b58a-4c3a-a9a8-7377c5d17273-utilities\") pod \"redhat-marketplace-b7jmv\" (UID: \"81f9d59c-b58a-4c3a-a9a8-7377c5d17273\") " pod="openshift-marketplace/redhat-marketplace-b7jmv" Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.287820 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81f9d59c-b58a-4c3a-a9a8-7377c5d17273-catalog-content\") pod \"redhat-marketplace-b7jmv\" (UID: \"81f9d59c-b58a-4c3a-a9a8-7377c5d17273\") " pod="openshift-marketplace/redhat-marketplace-b7jmv" Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.289199 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l5vs7"] Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.315533 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9lb6\" (UniqueName: \"kubernetes.io/projected/81f9d59c-b58a-4c3a-a9a8-7377c5d17273-kube-api-access-z9lb6\") pod \"redhat-marketplace-b7jmv\" (UID: \"81f9d59c-b58a-4c3a-a9a8-7377c5d17273\") " pod="openshift-marketplace/redhat-marketplace-b7jmv" Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.315927 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4lpnw"] Oct 03 08:42:55 crc kubenswrapper[4899]: W1003 08:42:55.340772 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podad6ce9b2_60bf_492e_b583_9147a171e7bd.slice/crio-779d62d47eecb63473b8ce6e5662ae05b55d05674203669837a5705c3cf08ed0 WatchSource:0}: Error finding container 779d62d47eecb63473b8ce6e5662ae05b55d05674203669837a5705c3cf08ed0: Status 404 returned error can't find the container with id 779d62d47eecb63473b8ce6e5662ae05b55d05674203669837a5705c3cf08ed0 Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.434152 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b7jmv" Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.536311 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" event={"ID":"aee31d1e-9049-4b14-9544-e26bc3ea2b38","Type":"ContainerStarted","Data":"908a4b69aba1aa56fd8e0d2838bf125308a8a55589a0a3213076b22c27cbb5ed"} Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.536355 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" event={"ID":"aee31d1e-9049-4b14-9544-e26bc3ea2b38","Type":"ContainerStarted","Data":"c74ed861137a1aa2151497bfd322a678dc0efd6429f1b187cc553e8b5147beb8"} Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.536553 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.567307 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" podStartSLOduration=129.567288387 podStartE2EDuration="2m9.567288387s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:42:55.565604684 +0000 UTC m=+149.673089627" watchObservedRunningTime="2025-10-03 08:42:55.567288387 +0000 UTC m=+149.674773340" Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.579626 4899 generic.go:334] "Generic (PLEG): container finished" podID="ad6ce9b2-60bf-492e-b583-9147a171e7bd" containerID="34307bec13d1158bfbe16358576caf750ba815e79710241defc4a5a04b4bd6a4" exitCode=0 Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.580037 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4lpnw" event={"ID":"ad6ce9b2-60bf-492e-b583-9147a171e7bd","Type":"ContainerDied","Data":"34307bec13d1158bfbe16358576caf750ba815e79710241defc4a5a04b4bd6a4"} Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.580073 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4lpnw" event={"ID":"ad6ce9b2-60bf-492e-b583-9147a171e7bd","Type":"ContainerStarted","Data":"779d62d47eecb63473b8ce6e5662ae05b55d05674203669837a5705c3cf08ed0"} Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.667208 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-b7jmv"] Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.705448 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-b99vh"] Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.707160 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-b99vh" Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.712289 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.717941 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b99vh"] Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.894928 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkn7n\" (UniqueName: \"kubernetes.io/projected/4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f-kube-api-access-kkn7n\") pod \"redhat-operators-b99vh\" (UID: \"4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f\") " pod="openshift-marketplace/redhat-operators-b99vh" Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.895026 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f-utilities\") pod \"redhat-operators-b99vh\" (UID: \"4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f\") " pod="openshift-marketplace/redhat-operators-b99vh" Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.895073 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f-catalog-content\") pod \"redhat-operators-b99vh\" (UID: \"4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f\") " pod="openshift-marketplace/redhat-operators-b99vh" Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.996472 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f-catalog-content\") pod \"redhat-operators-b99vh\" (UID: \"4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f\") " pod="openshift-marketplace/redhat-operators-b99vh" Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.996544 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkn7n\" (UniqueName: \"kubernetes.io/projected/4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f-kube-api-access-kkn7n\") pod \"redhat-operators-b99vh\" (UID: \"4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f\") " pod="openshift-marketplace/redhat-operators-b99vh" Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.996612 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f-utilities\") pod \"redhat-operators-b99vh\" (UID: \"4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f\") " pod="openshift-marketplace/redhat-operators-b99vh" Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.997417 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f-catalog-content\") pod \"redhat-operators-b99vh\" (UID: \"4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f\") " pod="openshift-marketplace/redhat-operators-b99vh" Oct 03 08:42:55 crc kubenswrapper[4899]: I1003 08:42:55.999326 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f-utilities\") pod \"redhat-operators-b99vh\" (UID: \"4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f\") " 
pod="openshift-marketplace/redhat-operators-b99vh" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.020365 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkn7n\" (UniqueName: \"kubernetes.io/projected/4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f-kube-api-access-kkn7n\") pod \"redhat-operators-b99vh\" (UID: \"4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f\") " pod="openshift-marketplace/redhat-operators-b99vh" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.021584 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324670-2j7rr" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.039531 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b99vh" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.085651 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.098258 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-v5crb" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.119977 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tnk22"] Oct 03 08:42:56 crc kubenswrapper[4899]: E1003 08:42:56.120310 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="264bb2e1-d946-4b30-9aa8-48cb9a9447e5" containerName="collect-profiles" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.120325 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="264bb2e1-d946-4b30-9aa8-48cb9a9447e5" containerName="collect-profiles" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.120455 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="264bb2e1-d946-4b30-9aa8-48cb9a9447e5" containerName="collect-profiles" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.121369 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tnk22" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.153825 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tnk22"] Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.167064 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.167707 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.172988 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.176045 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.183024 4899 patch_prober.go:28] interesting pod/router-default-5444994796-tfknb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 08:42:56 crc kubenswrapper[4899]: [-]has-synced failed: reason withheld Oct 03 08:42:56 crc kubenswrapper[4899]: [+]process-running ok Oct 03 08:42:56 crc kubenswrapper[4899]: healthz check failed Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.183087 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tfknb" podUID="9c011ebc-362b-4893-9d7e-6a11a1d3e902" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.193850 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.199551 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/264bb2e1-d946-4b30-9aa8-48cb9a9447e5-secret-volume\") pod \"264bb2e1-d946-4b30-9aa8-48cb9a9447e5\" (UID: \"264bb2e1-d946-4b30-9aa8-48cb9a9447e5\") " Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.199608 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/264bb2e1-d946-4b30-9aa8-48cb9a9447e5-config-volume\") pod \"264bb2e1-d946-4b30-9aa8-48cb9a9447e5\" (UID: \"264bb2e1-d946-4b30-9aa8-48cb9a9447e5\") " Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.199630 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d5cs8\" (UniqueName: \"kubernetes.io/projected/264bb2e1-d946-4b30-9aa8-48cb9a9447e5-kube-api-access-d5cs8\") pod \"264bb2e1-d946-4b30-9aa8-48cb9a9447e5\" (UID: \"264bb2e1-d946-4b30-9aa8-48cb9a9447e5\") " Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.203842 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/264bb2e1-d946-4b30-9aa8-48cb9a9447e5-config-volume" (OuterVolumeSpecName: "config-volume") pod "264bb2e1-d946-4b30-9aa8-48cb9a9447e5" (UID: "264bb2e1-d946-4b30-9aa8-48cb9a9447e5"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.229390 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/264bb2e1-d946-4b30-9aa8-48cb9a9447e5-kube-api-access-d5cs8" (OuterVolumeSpecName: "kube-api-access-d5cs8") pod "264bb2e1-d946-4b30-9aa8-48cb9a9447e5" (UID: "264bb2e1-d946-4b30-9aa8-48cb9a9447e5"). InnerVolumeSpecName "kube-api-access-d5cs8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.231981 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/264bb2e1-d946-4b30-9aa8-48cb9a9447e5-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "264bb2e1-d946-4b30-9aa8-48cb9a9447e5" (UID: "264bb2e1-d946-4b30-9aa8-48cb9a9447e5"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.300842 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/321b82b3-0aa0-409d-8a8a-8ea91085f407-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"321b82b3-0aa0-409d-8a8a-8ea91085f407\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.300938 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a5429d2-4cf0-4d34-9bac-fee4008bd409-catalog-content\") pod \"redhat-operators-tnk22\" (UID: \"9a5429d2-4cf0-4d34-9bac-fee4008bd409\") " pod="openshift-marketplace/redhat-operators-tnk22" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.300998 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a5429d2-4cf0-4d34-9bac-fee4008bd409-utilities\") pod \"redhat-operators-tnk22\" (UID: \"9a5429d2-4cf0-4d34-9bac-fee4008bd409\") " pod="openshift-marketplace/redhat-operators-tnk22" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.301024 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjjrt\" (UniqueName: \"kubernetes.io/projected/9a5429d2-4cf0-4d34-9bac-fee4008bd409-kube-api-access-rjjrt\") pod \"redhat-operators-tnk22\" (UID: \"9a5429d2-4cf0-4d34-9bac-fee4008bd409\") " pod="openshift-marketplace/redhat-operators-tnk22" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.301049 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/321b82b3-0aa0-409d-8a8a-8ea91085f407-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"321b82b3-0aa0-409d-8a8a-8ea91085f407\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.301095 4899 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/264bb2e1-d946-4b30-9aa8-48cb9a9447e5-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.301112 4899 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/264bb2e1-d946-4b30-9aa8-48cb9a9447e5-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.301124 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d5cs8\" (UniqueName: \"kubernetes.io/projected/264bb2e1-d946-4b30-9aa8-48cb9a9447e5-kube-api-access-d5cs8\") on node \"crc\" DevicePath \"\"" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.402200 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: 
\"kubernetes.io/host-path/321b82b3-0aa0-409d-8a8a-8ea91085f407-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"321b82b3-0aa0-409d-8a8a-8ea91085f407\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.402254 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a5429d2-4cf0-4d34-9bac-fee4008bd409-catalog-content\") pod \"redhat-operators-tnk22\" (UID: \"9a5429d2-4cf0-4d34-9bac-fee4008bd409\") " pod="openshift-marketplace/redhat-operators-tnk22" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.402304 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a5429d2-4cf0-4d34-9bac-fee4008bd409-utilities\") pod \"redhat-operators-tnk22\" (UID: \"9a5429d2-4cf0-4d34-9bac-fee4008bd409\") " pod="openshift-marketplace/redhat-operators-tnk22" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.402325 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjjrt\" (UniqueName: \"kubernetes.io/projected/9a5429d2-4cf0-4d34-9bac-fee4008bd409-kube-api-access-rjjrt\") pod \"redhat-operators-tnk22\" (UID: \"9a5429d2-4cf0-4d34-9bac-fee4008bd409\") " pod="openshift-marketplace/redhat-operators-tnk22" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.402341 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/321b82b3-0aa0-409d-8a8a-8ea91085f407-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"321b82b3-0aa0-409d-8a8a-8ea91085f407\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.402391 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/321b82b3-0aa0-409d-8a8a-8ea91085f407-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"321b82b3-0aa0-409d-8a8a-8ea91085f407\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.402854 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a5429d2-4cf0-4d34-9bac-fee4008bd409-utilities\") pod \"redhat-operators-tnk22\" (UID: \"9a5429d2-4cf0-4d34-9bac-fee4008bd409\") " pod="openshift-marketplace/redhat-operators-tnk22" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.403062 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a5429d2-4cf0-4d34-9bac-fee4008bd409-catalog-content\") pod \"redhat-operators-tnk22\" (UID: \"9a5429d2-4cf0-4d34-9bac-fee4008bd409\") " pod="openshift-marketplace/redhat-operators-tnk22" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.422794 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjjrt\" (UniqueName: \"kubernetes.io/projected/9a5429d2-4cf0-4d34-9bac-fee4008bd409-kube-api-access-rjjrt\") pod \"redhat-operators-tnk22\" (UID: \"9a5429d2-4cf0-4d34-9bac-fee4008bd409\") " pod="openshift-marketplace/redhat-operators-tnk22" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.429403 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/321b82b3-0aa0-409d-8a8a-8ea91085f407-kube-api-access\") 
pod \"revision-pruner-8-crc\" (UID: \"321b82b3-0aa0-409d-8a8a-8ea91085f407\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.491763 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tnk22" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.503596 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b99vh"] Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.525068 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.526367 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.530044 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.530212 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.545988 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.552196 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.553082 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.564213 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-k28dm" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.565098 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-k28dm" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.569555 4899 patch_prober.go:28] interesting pod/console-f9d7485db-k28dm container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.9:8443/health\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.569595 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-k28dm" podUID="633aedb3-7eca-4c2c-b6c3-69a0f7c4787d" containerName="console" probeResult="failure" output="Get \"https://10.217.0.9:8443/health\": dial tcp 10.217.0.9:8443: connect: connection refused" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.597870 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324670-2j7rr" event={"ID":"264bb2e1-d946-4b30-9aa8-48cb9a9447e5","Type":"ContainerDied","Data":"81c883c1c318c30266fa5b224eec3205d4756eadf2cc73dc64359d4b3177af74"} Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.597938 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="81c883c1c318c30266fa5b224eec3205d4756eadf2cc73dc64359d4b3177af74" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.598045 
4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324670-2j7rr" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.604263 4899 generic.go:334] "Generic (PLEG): container finished" podID="81f9d59c-b58a-4c3a-a9a8-7377c5d17273" containerID="56997bd971cb15f0ccd53568145f017557ad239fbec0724160dfa71cfa107d7c" exitCode=0 Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.604472 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b7jmv" event={"ID":"81f9d59c-b58a-4c3a-a9a8-7377c5d17273","Type":"ContainerDied","Data":"56997bd971cb15f0ccd53568145f017557ad239fbec0724160dfa71cfa107d7c"} Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.604537 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b7jmv" event={"ID":"81f9d59c-b58a-4c3a-a9a8-7377c5d17273","Type":"ContainerStarted","Data":"e828ad7e0ef82087ebbc206026ff44fae9a8d426a21bd2ed12075cc2e75ace60"} Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.607806 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b99vh" event={"ID":"4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f","Type":"ContainerStarted","Data":"3f94b371a270af50e326d00cc32e3d58101a2533ba2b25a6090608d534b7623b"} Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.706365 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/822debfe-b242-48f7-82f3-5f35d3285775-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"822debfe-b242-48f7-82f3-5f35d3285775\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.706447 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/822debfe-b242-48f7-82f3-5f35d3285775-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"822debfe-b242-48f7-82f3-5f35d3285775\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.808804 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/822debfe-b242-48f7-82f3-5f35d3285775-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"822debfe-b242-48f7-82f3-5f35d3285775\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.809125 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/822debfe-b242-48f7-82f3-5f35d3285775-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"822debfe-b242-48f7-82f3-5f35d3285775\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.809277 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/822debfe-b242-48f7-82f3-5f35d3285775-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"822debfe-b242-48f7-82f3-5f35d3285775\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.833629 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" 
(UniqueName: \"kubernetes.io/projected/822debfe-b242-48f7-82f3-5f35d3285775-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"822debfe-b242-48f7-82f3-5f35d3285775\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.860767 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.919069 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tnk22"] Oct 03 08:42:56 crc kubenswrapper[4899]: I1003 08:42:56.977056 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 03 08:42:57 crc kubenswrapper[4899]: I1003 08:42:57.174026 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-tfknb" Oct 03 08:42:57 crc kubenswrapper[4899]: I1003 08:42:57.177702 4899 patch_prober.go:28] interesting pod/router-default-5444994796-tfknb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 08:42:57 crc kubenswrapper[4899]: [-]has-synced failed: reason withheld Oct 03 08:42:57 crc kubenswrapper[4899]: [+]process-running ok Oct 03 08:42:57 crc kubenswrapper[4899]: healthz check failed Oct 03 08:42:57 crc kubenswrapper[4899]: I1003 08:42:57.177746 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tfknb" podUID="9c011ebc-362b-4893-9d7e-6a11a1d3e902" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 08:42:57 crc kubenswrapper[4899]: I1003 08:42:57.219138 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5mh7r" Oct 03 08:42:57 crc kubenswrapper[4899]: I1003 08:42:57.290237 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 03 08:42:57 crc kubenswrapper[4899]: W1003 08:42:57.301332 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod822debfe_b242_48f7_82f3_5f35d3285775.slice/crio-a63c28a054e882a9a3495926f6bd318383daecb638ec7626c84aa461e289067e WatchSource:0}: Error finding container a63c28a054e882a9a3495926f6bd318383daecb638ec7626c84aa461e289067e: Status 404 returned error can't find the container with id a63c28a054e882a9a3495926f6bd318383daecb638ec7626c84aa461e289067e Oct 03 08:42:57 crc kubenswrapper[4899]: I1003 08:42:57.453271 4899 patch_prober.go:28] interesting pod/downloads-7954f5f757-dpwsf container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.16:8080/\": dial tcp 10.217.0.16:8080: connect: connection refused" start-of-body= Oct 03 08:42:57 crc kubenswrapper[4899]: I1003 08:42:57.453662 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-dpwsf" podUID="2206e9a3-b591-4d0a-aa09-9dc5b2e54342" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.16:8080/\": dial tcp 10.217.0.16:8080: connect: connection refused" Oct 03 08:42:57 crc kubenswrapper[4899]: I1003 08:42:57.453269 4899 patch_prober.go:28] interesting pod/downloads-7954f5f757-dpwsf container/download-server 
namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.16:8080/\": dial tcp 10.217.0.16:8080: connect: connection refused" start-of-body= Oct 03 08:42:57 crc kubenswrapper[4899]: I1003 08:42:57.454303 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-dpwsf" podUID="2206e9a3-b591-4d0a-aa09-9dc5b2e54342" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.16:8080/\": dial tcp 10.217.0.16:8080: connect: connection refused" Oct 03 08:42:57 crc kubenswrapper[4899]: I1003 08:42:57.500427 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:57 crc kubenswrapper[4899]: I1003 08:42:57.507651 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5hl5h" Oct 03 08:42:57 crc kubenswrapper[4899]: I1003 08:42:57.589990 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-np5qp" Oct 03 08:42:57 crc kubenswrapper[4899]: I1003 08:42:57.669693 4899 generic.go:334] "Generic (PLEG): container finished" podID="9a5429d2-4cf0-4d34-9bac-fee4008bd409" containerID="e18e88fcd01b1eebbc5191d126ed4f3f7113c9d482bb45d162509b54aee9e802" exitCode=0 Oct 03 08:42:57 crc kubenswrapper[4899]: I1003 08:42:57.669777 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnk22" event={"ID":"9a5429d2-4cf0-4d34-9bac-fee4008bd409","Type":"ContainerDied","Data":"e18e88fcd01b1eebbc5191d126ed4f3f7113c9d482bb45d162509b54aee9e802"} Oct 03 08:42:57 crc kubenswrapper[4899]: I1003 08:42:57.669809 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnk22" event={"ID":"9a5429d2-4cf0-4d34-9bac-fee4008bd409","Type":"ContainerStarted","Data":"411aaf22d20388fdf35ebbed6638b0feb5794b441f1e19cc6b1672f8b89c82cd"} Oct 03 08:42:57 crc kubenswrapper[4899]: I1003 08:42:57.707210 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"822debfe-b242-48f7-82f3-5f35d3285775","Type":"ContainerStarted","Data":"a63c28a054e882a9a3495926f6bd318383daecb638ec7626c84aa461e289067e"} Oct 03 08:42:57 crc kubenswrapper[4899]: I1003 08:42:57.741764 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"321b82b3-0aa0-409d-8a8a-8ea91085f407","Type":"ContainerStarted","Data":"004b7b51b058836ad57ca80412017b679bbbb8479ea2f0c9a41823a4e86119fb"} Oct 03 08:42:57 crc kubenswrapper[4899]: I1003 08:42:57.771146 4899 generic.go:334] "Generic (PLEG): container finished" podID="4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f" containerID="03a6fb695d6b4e00a8ae647b0e29105c8fd46566df721ee3d2bbf9489fc34460" exitCode=0 Oct 03 08:42:57 crc kubenswrapper[4899]: I1003 08:42:57.773429 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b99vh" event={"ID":"4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f","Type":"ContainerDied","Data":"03a6fb695d6b4e00a8ae647b0e29105c8fd46566df721ee3d2bbf9489fc34460"} Oct 03 08:42:58 crc kubenswrapper[4899]: I1003 08:42:58.177385 4899 patch_prober.go:28] interesting pod/router-default-5444994796-tfknb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http 
failed: reason withheld Oct 03 08:42:58 crc kubenswrapper[4899]: [-]has-synced failed: reason withheld Oct 03 08:42:58 crc kubenswrapper[4899]: [+]process-running ok Oct 03 08:42:58 crc kubenswrapper[4899]: healthz check failed Oct 03 08:42:58 crc kubenswrapper[4899]: I1003 08:42:58.177742 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tfknb" podUID="9c011ebc-362b-4893-9d7e-6a11a1d3e902" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 08:42:58 crc kubenswrapper[4899]: I1003 08:42:58.780907 4899 generic.go:334] "Generic (PLEG): container finished" podID="321b82b3-0aa0-409d-8a8a-8ea91085f407" containerID="0e37cf0cee61b4b693d54f1f6260410639bfb74933a62db79a290ace05695b3c" exitCode=0 Oct 03 08:42:58 crc kubenswrapper[4899]: I1003 08:42:58.780989 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"321b82b3-0aa0-409d-8a8a-8ea91085f407","Type":"ContainerDied","Data":"0e37cf0cee61b4b693d54f1f6260410639bfb74933a62db79a290ace05695b3c"} Oct 03 08:42:58 crc kubenswrapper[4899]: I1003 08:42:58.782941 4899 generic.go:334] "Generic (PLEG): container finished" podID="822debfe-b242-48f7-82f3-5f35d3285775" containerID="2b8789cd33c3fefe3a0adbe9c247b587802daed80ae6f2ecb4bb40afebafc173" exitCode=0 Oct 03 08:42:58 crc kubenswrapper[4899]: I1003 08:42:58.782984 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"822debfe-b242-48f7-82f3-5f35d3285775","Type":"ContainerDied","Data":"2b8789cd33c3fefe3a0adbe9c247b587802daed80ae6f2ecb4bb40afebafc173"} Oct 03 08:42:59 crc kubenswrapper[4899]: I1003 08:42:59.176337 4899 patch_prober.go:28] interesting pod/router-default-5444994796-tfknb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 08:42:59 crc kubenswrapper[4899]: [-]has-synced failed: reason withheld Oct 03 08:42:59 crc kubenswrapper[4899]: [+]process-running ok Oct 03 08:42:59 crc kubenswrapper[4899]: healthz check failed Oct 03 08:42:59 crc kubenswrapper[4899]: I1003 08:42:59.176396 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tfknb" podUID="9c011ebc-362b-4893-9d7e-6a11a1d3e902" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 08:43:00 crc kubenswrapper[4899]: I1003 08:43:00.157489 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 08:43:00 crc kubenswrapper[4899]: I1003 08:43:00.158924 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 08:43:00 crc kubenswrapper[4899]: I1003 08:43:00.179702 4899 patch_prober.go:28] interesting pod/router-default-5444994796-tfknb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 08:43:00 crc kubenswrapper[4899]: [-]has-synced failed: reason withheld Oct 03 08:43:00 crc kubenswrapper[4899]: [+]process-running ok Oct 03 08:43:00 crc kubenswrapper[4899]: healthz check failed Oct 03 08:43:00 crc kubenswrapper[4899]: I1003 08:43:00.179751 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tfknb" podUID="9c011ebc-362b-4893-9d7e-6a11a1d3e902" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 08:43:00 crc kubenswrapper[4899]: I1003 08:43:00.284661 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/822debfe-b242-48f7-82f3-5f35d3285775-kubelet-dir\") pod \"822debfe-b242-48f7-82f3-5f35d3285775\" (UID: \"822debfe-b242-48f7-82f3-5f35d3285775\") " Oct 03 08:43:00 crc kubenswrapper[4899]: I1003 08:43:00.284808 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/822debfe-b242-48f7-82f3-5f35d3285775-kube-api-access\") pod \"822debfe-b242-48f7-82f3-5f35d3285775\" (UID: \"822debfe-b242-48f7-82f3-5f35d3285775\") " Oct 03 08:43:00 crc kubenswrapper[4899]: I1003 08:43:00.284808 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/822debfe-b242-48f7-82f3-5f35d3285775-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "822debfe-b242-48f7-82f3-5f35d3285775" (UID: "822debfe-b242-48f7-82f3-5f35d3285775"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 08:43:00 crc kubenswrapper[4899]: I1003 08:43:00.284848 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/321b82b3-0aa0-409d-8a8a-8ea91085f407-kubelet-dir\") pod \"321b82b3-0aa0-409d-8a8a-8ea91085f407\" (UID: \"321b82b3-0aa0-409d-8a8a-8ea91085f407\") " Oct 03 08:43:00 crc kubenswrapper[4899]: I1003 08:43:00.284882 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/321b82b3-0aa0-409d-8a8a-8ea91085f407-kube-api-access\") pod \"321b82b3-0aa0-409d-8a8a-8ea91085f407\" (UID: \"321b82b3-0aa0-409d-8a8a-8ea91085f407\") " Oct 03 08:43:00 crc kubenswrapper[4899]: I1003 08:43:00.284936 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/321b82b3-0aa0-409d-8a8a-8ea91085f407-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "321b82b3-0aa0-409d-8a8a-8ea91085f407" (UID: "321b82b3-0aa0-409d-8a8a-8ea91085f407"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 08:43:00 crc kubenswrapper[4899]: I1003 08:43:00.285165 4899 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/321b82b3-0aa0-409d-8a8a-8ea91085f407-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 03 08:43:00 crc kubenswrapper[4899]: I1003 08:43:00.285194 4899 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/822debfe-b242-48f7-82f3-5f35d3285775-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 03 08:43:00 crc kubenswrapper[4899]: I1003 08:43:00.294095 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/321b82b3-0aa0-409d-8a8a-8ea91085f407-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "321b82b3-0aa0-409d-8a8a-8ea91085f407" (UID: "321b82b3-0aa0-409d-8a8a-8ea91085f407"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:43:00 crc kubenswrapper[4899]: I1003 08:43:00.307662 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/822debfe-b242-48f7-82f3-5f35d3285775-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "822debfe-b242-48f7-82f3-5f35d3285775" (UID: "822debfe-b242-48f7-82f3-5f35d3285775"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:43:00 crc kubenswrapper[4899]: I1003 08:43:00.386348 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/822debfe-b242-48f7-82f3-5f35d3285775-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 03 08:43:00 crc kubenswrapper[4899]: I1003 08:43:00.386390 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/321b82b3-0aa0-409d-8a8a-8ea91085f407-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 03 08:43:00 crc kubenswrapper[4899]: I1003 08:43:00.825025 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 08:43:00 crc kubenswrapper[4899]: I1003 08:43:00.825040 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"321b82b3-0aa0-409d-8a8a-8ea91085f407","Type":"ContainerDied","Data":"004b7b51b058836ad57ca80412017b679bbbb8479ea2f0c9a41823a4e86119fb"} Oct 03 08:43:00 crc kubenswrapper[4899]: I1003 08:43:00.825094 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="004b7b51b058836ad57ca80412017b679bbbb8479ea2f0c9a41823a4e86119fb" Oct 03 08:43:00 crc kubenswrapper[4899]: I1003 08:43:00.828945 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"822debfe-b242-48f7-82f3-5f35d3285775","Type":"ContainerDied","Data":"a63c28a054e882a9a3495926f6bd318383daecb638ec7626c84aa461e289067e"} Oct 03 08:43:00 crc kubenswrapper[4899]: I1003 08:43:00.828988 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a63c28a054e882a9a3495926f6bd318383daecb638ec7626c84aa461e289067e" Oct 03 08:43:00 crc kubenswrapper[4899]: I1003 08:43:00.829012 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 08:43:01 crc kubenswrapper[4899]: I1003 08:43:01.176038 4899 patch_prober.go:28] interesting pod/router-default-5444994796-tfknb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 08:43:01 crc kubenswrapper[4899]: [-]has-synced failed: reason withheld Oct 03 08:43:01 crc kubenswrapper[4899]: [+]process-running ok Oct 03 08:43:01 crc kubenswrapper[4899]: healthz check failed Oct 03 08:43:01 crc kubenswrapper[4899]: I1003 08:43:01.176289 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tfknb" podUID="9c011ebc-362b-4893-9d7e-6a11a1d3e902" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 08:43:02 crc kubenswrapper[4899]: I1003 08:43:02.175723 4899 patch_prober.go:28] interesting pod/router-default-5444994796-tfknb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 08:43:02 crc kubenswrapper[4899]: [-]has-synced failed: reason withheld Oct 03 08:43:02 crc kubenswrapper[4899]: [+]process-running ok Oct 03 08:43:02 crc kubenswrapper[4899]: healthz check failed Oct 03 08:43:02 crc kubenswrapper[4899]: I1003 08:43:02.175789 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tfknb" podUID="9c011ebc-362b-4893-9d7e-6a11a1d3e902" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 08:43:02 crc kubenswrapper[4899]: I1003 08:43:02.627097 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-8j6kq" Oct 03 08:43:03 crc kubenswrapper[4899]: I1003 08:43:03.181345 4899 patch_prober.go:28] interesting pod/router-default-5444994796-tfknb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 08:43:03 crc kubenswrapper[4899]: [-]has-synced failed: reason withheld Oct 03 08:43:03 crc kubenswrapper[4899]: [+]process-running ok Oct 03 08:43:03 crc kubenswrapper[4899]: healthz check failed Oct 03 08:43:03 crc kubenswrapper[4899]: I1003 08:43:03.181758 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tfknb" podUID="9c011ebc-362b-4893-9d7e-6a11a1d3e902" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 08:43:03 crc kubenswrapper[4899]: I1003 08:43:03.460528 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:43:04 crc kubenswrapper[4899]: I1003 08:43:04.180565 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-tfknb" Oct 03 08:43:04 crc kubenswrapper[4899]: I1003 08:43:04.183413 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-tfknb" Oct 03 08:43:06 crc kubenswrapper[4899]: I1003 08:43:06.564449 4899 patch_prober.go:28] interesting pod/console-f9d7485db-k28dm container/console namespace/openshift-console: Startup probe status=failure output="Get 
\"https://10.217.0.9:8443/health\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Oct 03 08:43:06 crc kubenswrapper[4899]: I1003 08:43:06.564763 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-k28dm" podUID="633aedb3-7eca-4c2c-b6c3-69a0f7c4787d" containerName="console" probeResult="failure" output="Get \"https://10.217.0.9:8443/health\": dial tcp 10.217.0.9:8443: connect: connection refused" Oct 03 08:43:07 crc kubenswrapper[4899]: I1003 08:43:07.457009 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-dpwsf" Oct 03 08:43:08 crc kubenswrapper[4899]: I1003 08:43:08.102238 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/27fd79a9-c016-46aa-8b67-446a831eb2d8-metrics-certs\") pod \"network-metrics-daemon-ldv5d\" (UID: \"27fd79a9-c016-46aa-8b67-446a831eb2d8\") " pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:43:08 crc kubenswrapper[4899]: I1003 08:43:08.107229 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/27fd79a9-c016-46aa-8b67-446a831eb2d8-metrics-certs\") pod \"network-metrics-daemon-ldv5d\" (UID: \"27fd79a9-c016-46aa-8b67-446a831eb2d8\") " pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:43:08 crc kubenswrapper[4899]: I1003 08:43:08.237488 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ldv5d" Oct 03 08:43:12 crc kubenswrapper[4899]: I1003 08:43:12.198166 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 08:43:12 crc kubenswrapper[4899]: I1003 08:43:12.198509 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 08:43:14 crc kubenswrapper[4899]: I1003 08:43:14.972529 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:43:16 crc kubenswrapper[4899]: I1003 08:43:16.742118 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-k28dm" Oct 03 08:43:16 crc kubenswrapper[4899]: I1003 08:43:16.747028 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-k28dm" Oct 03 08:43:25 crc kubenswrapper[4899]: E1003 08:43:25.358552 4899 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 03 08:43:25 crc kubenswrapper[4899]: E1003 08:43:25.359367 4899 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs 
--catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pr49d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-5xplz_openshift-marketplace(85fb2d07-4784-45b8-952e-2b12d61ea024): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 08:43:25 crc kubenswrapper[4899]: E1003 08:43:25.360857 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-5xplz" podUID="85fb2d07-4784-45b8-952e-2b12d61ea024" Oct 03 08:43:25 crc kubenswrapper[4899]: E1003 08:43:25.368566 4899 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 03 08:43:25 crc kubenswrapper[4899]: E1003 08:43:25.368739 4899 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rb68l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-9zxwg_openshift-marketplace(d5013bd4-157b-4e1b-b1e7-df61922402cc): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 08:43:25 crc kubenswrapper[4899]: E1003 08:43:25.370036 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-9zxwg" podUID="d5013bd4-157b-4e1b-b1e7-df61922402cc" Oct 03 08:43:26 crc kubenswrapper[4899]: E1003 08:43:26.385993 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-9zxwg" podUID="d5013bd4-157b-4e1b-b1e7-df61922402cc" Oct 03 08:43:26 crc kubenswrapper[4899]: E1003 08:43:26.386073 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-5xplz" podUID="85fb2d07-4784-45b8-952e-2b12d61ea024" Oct 03 08:43:26 crc kubenswrapper[4899]: E1003 08:43:26.460951 4899 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 03 08:43:26 crc kubenswrapper[4899]: E1003 08:43:26.461092 4899 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-775ml,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-jtp4v_openshift-marketplace(49764d57-9b3d-4097-a2f4-08a363c6a25f): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 08:43:26 crc kubenswrapper[4899]: E1003 08:43:26.462279 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-jtp4v" podUID="49764d57-9b3d-4097-a2f4-08a363c6a25f" Oct 03 08:43:27 crc kubenswrapper[4899]: I1003 08:43:27.611151 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mcjm4" Oct 03 08:43:28 crc kubenswrapper[4899]: E1003 08:43:28.916398 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-jtp4v" podUID="49764d57-9b3d-4097-a2f4-08a363c6a25f" Oct 03 08:43:31 crc kubenswrapper[4899]: E1003 08:43:31.233753 4899 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Oct 03 08:43:31 crc kubenswrapper[4899]: E1003 08:43:31.233920 4899 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rjjrt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-tnk22_openshift-marketplace(9a5429d2-4cf0-4d34-9bac-fee4008bd409): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 08:43:31 crc kubenswrapper[4899]: E1003 08:43:31.237120 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-tnk22" podUID="9a5429d2-4cf0-4d34-9bac-fee4008bd409" Oct 03 08:43:31 crc kubenswrapper[4899]: E1003 08:43:31.244236 4899 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Oct 03 08:43:31 crc kubenswrapper[4899]: E1003 08:43:31.244392 4899 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kkn7n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-b99vh_openshift-marketplace(4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 08:43:31 crc kubenswrapper[4899]: E1003 08:43:31.246135 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-b99vh" podUID="4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f" Oct 03 08:43:31 crc kubenswrapper[4899]: E1003 08:43:31.253743 4899 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 03 08:43:31 crc kubenswrapper[4899]: E1003 08:43:31.253904 4899 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vht5n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-n9vg4_openshift-marketplace(ea22580f-8ebf-46dc-adc0-c17174f4a096): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 08:43:31 crc kubenswrapper[4899]: E1003 08:43:31.255098 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-n9vg4" podUID="ea22580f-8ebf-46dc-adc0-c17174f4a096" Oct 03 08:43:31 crc kubenswrapper[4899]: E1003 08:43:31.765516 4899 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 03 08:43:31 crc kubenswrapper[4899]: E1003 08:43:31.765749 4899 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7p6pj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-4lpnw_openshift-marketplace(ad6ce9b2-60bf-492e-b583-9147a171e7bd): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 08:43:31 crc kubenswrapper[4899]: E1003 08:43:31.767040 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-4lpnw" podUID="ad6ce9b2-60bf-492e-b583-9147a171e7bd" Oct 03 08:43:31 crc kubenswrapper[4899]: E1003 08:43:31.782319 4899 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 03 08:43:31 crc kubenswrapper[4899]: E1003 08:43:31.782736 4899 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-z9lb6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-b7jmv_openshift-marketplace(81f9d59c-b58a-4c3a-a9a8-7377c5d17273): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 08:43:31 crc kubenswrapper[4899]: E1003 08:43:31.784992 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-b7jmv" podUID="81f9d59c-b58a-4c3a-a9a8-7377c5d17273" Oct 03 08:43:31 crc kubenswrapper[4899]: E1003 08:43:31.998713 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-n9vg4" podUID="ea22580f-8ebf-46dc-adc0-c17174f4a096" Oct 03 08:43:31 crc kubenswrapper[4899]: E1003 08:43:31.999266 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-b7jmv" podUID="81f9d59c-b58a-4c3a-a9a8-7377c5d17273" Oct 03 08:43:31 crc kubenswrapper[4899]: E1003 08:43:31.999328 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-4lpnw" podUID="ad6ce9b2-60bf-492e-b583-9147a171e7bd" Oct 03 08:43:31 crc kubenswrapper[4899]: E1003 08:43:31.999380 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-tnk22" podUID="9a5429d2-4cf0-4d34-9bac-fee4008bd409" Oct 03 08:43:31 crc 
kubenswrapper[4899]: E1003 08:43:31.999418 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-b99vh" podUID="4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f" Oct 03 08:43:32 crc kubenswrapper[4899]: I1003 08:43:32.084433 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-ldv5d"] Oct 03 08:43:33 crc kubenswrapper[4899]: I1003 08:43:33.003027 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-ldv5d" event={"ID":"27fd79a9-c016-46aa-8b67-446a831eb2d8","Type":"ContainerStarted","Data":"512dbed7cbd9097ff1ecfc59eadc7b2de309e10fce4548f775e95ab886fb2ebd"} Oct 03 08:43:33 crc kubenswrapper[4899]: I1003 08:43:33.003334 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-ldv5d" event={"ID":"27fd79a9-c016-46aa-8b67-446a831eb2d8","Type":"ContainerStarted","Data":"5ea966a268860130ff147184e2452eebe78afaaa5c7645021ffdf64554c3ed76"} Oct 03 08:43:33 crc kubenswrapper[4899]: I1003 08:43:33.003344 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-ldv5d" event={"ID":"27fd79a9-c016-46aa-8b67-446a831eb2d8","Type":"ContainerStarted","Data":"33c7182487a023faeeb9cce2a7dcd647cc0206ce8164f1f832581381093b1339"} Oct 03 08:43:33 crc kubenswrapper[4899]: I1003 08:43:33.026309 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-ldv5d" podStartSLOduration=167.026290822 podStartE2EDuration="2m47.026290822s" podCreationTimestamp="2025-10-03 08:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:43:33.024281 +0000 UTC m=+187.131765963" watchObservedRunningTime="2025-10-03 08:43:33.026290822 +0000 UTC m=+187.133775775" Oct 03 08:43:33 crc kubenswrapper[4899]: I1003 08:43:33.467139 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 08:43:40 crc kubenswrapper[4899]: I1003 08:43:40.041473 4899 generic.go:334] "Generic (PLEG): container finished" podID="85fb2d07-4784-45b8-952e-2b12d61ea024" containerID="50edf9325ac4ab45aad67d24980095054f1ec58586099ef0f034c98e62008509" exitCode=0 Oct 03 08:43:40 crc kubenswrapper[4899]: I1003 08:43:40.041567 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5xplz" event={"ID":"85fb2d07-4784-45b8-952e-2b12d61ea024","Type":"ContainerDied","Data":"50edf9325ac4ab45aad67d24980095054f1ec58586099ef0f034c98e62008509"} Oct 03 08:43:41 crc kubenswrapper[4899]: I1003 08:43:41.049726 4899 generic.go:334] "Generic (PLEG): container finished" podID="d5013bd4-157b-4e1b-b1e7-df61922402cc" containerID="5665502aff33a58346f4214e4bedd8b0260291aa841641d28c515f0a78efef94" exitCode=0 Oct 03 08:43:41 crc kubenswrapper[4899]: I1003 08:43:41.049769 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9zxwg" event={"ID":"d5013bd4-157b-4e1b-b1e7-df61922402cc","Type":"ContainerDied","Data":"5665502aff33a58346f4214e4bedd8b0260291aa841641d28c515f0a78efef94"} Oct 03 08:43:42 crc kubenswrapper[4899]: I1003 08:43:42.197797 4899 patch_prober.go:28] interesting 
pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 08:43:42 crc kubenswrapper[4899]: I1003 08:43:42.198189 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 08:43:56 crc kubenswrapper[4899]: I1003 08:43:56.134927 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5xplz" event={"ID":"85fb2d07-4784-45b8-952e-2b12d61ea024","Type":"ContainerStarted","Data":"9cc116110d155f6971cc17c1a6d3931c2e27ff8547636e4325df1f2bc5b6af39"} Oct 03 08:43:57 crc kubenswrapper[4899]: I1003 08:43:57.142620 4899 generic.go:334] "Generic (PLEG): container finished" podID="49764d57-9b3d-4097-a2f4-08a363c6a25f" containerID="a0a725cd4beb746a5d82769d169f4100312905d27cb852de145a8d54bb844377" exitCode=0 Oct 03 08:43:57 crc kubenswrapper[4899]: I1003 08:43:57.142740 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jtp4v" event={"ID":"49764d57-9b3d-4097-a2f4-08a363c6a25f","Type":"ContainerDied","Data":"a0a725cd4beb746a5d82769d169f4100312905d27cb852de145a8d54bb844377"} Oct 03 08:43:57 crc kubenswrapper[4899]: I1003 08:43:57.147276 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9zxwg" event={"ID":"d5013bd4-157b-4e1b-b1e7-df61922402cc","Type":"ContainerStarted","Data":"e12a01d59c05bb0dcd693b55b3418834d8539684cb22e9b7745d36efc47f6767"} Oct 03 08:43:57 crc kubenswrapper[4899]: I1003 08:43:57.157226 4899 generic.go:334] "Generic (PLEG): container finished" podID="ad6ce9b2-60bf-492e-b583-9147a171e7bd" containerID="07e37717892ce6ad14d5a5bd5ff091e5d5d421efa40e73f1a976094aeae33c3a" exitCode=0 Oct 03 08:43:57 crc kubenswrapper[4899]: I1003 08:43:57.157296 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4lpnw" event={"ID":"ad6ce9b2-60bf-492e-b583-9147a171e7bd","Type":"ContainerDied","Data":"07e37717892ce6ad14d5a5bd5ff091e5d5d421efa40e73f1a976094aeae33c3a"} Oct 03 08:43:57 crc kubenswrapper[4899]: I1003 08:43:57.164407 4899 generic.go:334] "Generic (PLEG): container finished" podID="ea22580f-8ebf-46dc-adc0-c17174f4a096" containerID="2808ce19792e5de523a23d6d1137356f6dc1e583fe901e66902cf141be426ac4" exitCode=0 Oct 03 08:43:57 crc kubenswrapper[4899]: I1003 08:43:57.164535 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9vg4" event={"ID":"ea22580f-8ebf-46dc-adc0-c17174f4a096","Type":"ContainerDied","Data":"2808ce19792e5de523a23d6d1137356f6dc1e583fe901e66902cf141be426ac4"} Oct 03 08:43:57 crc kubenswrapper[4899]: I1003 08:43:57.166670 4899 generic.go:334] "Generic (PLEG): container finished" podID="81f9d59c-b58a-4c3a-a9a8-7377c5d17273" containerID="d7181ab5f2708e4e55bfc491c3e92a0cfff2d3488509ae69c2785cf85a380e4f" exitCode=0 Oct 03 08:43:57 crc kubenswrapper[4899]: I1003 08:43:57.166733 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b7jmv" 
event={"ID":"81f9d59c-b58a-4c3a-a9a8-7377c5d17273","Type":"ContainerDied","Data":"d7181ab5f2708e4e55bfc491c3e92a0cfff2d3488509ae69c2785cf85a380e4f"} Oct 03 08:43:57 crc kubenswrapper[4899]: I1003 08:43:57.168442 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5xplz" podStartSLOduration=13.305341347 podStartE2EDuration="1m5.16842849s" podCreationTimestamp="2025-10-03 08:42:52 +0000 UTC" firstStartedPulling="2025-10-03 08:42:54.497576333 +0000 UTC m=+148.605061296" lastFinishedPulling="2025-10-03 08:43:46.360663476 +0000 UTC m=+200.468148439" observedRunningTime="2025-10-03 08:43:56.156439052 +0000 UTC m=+210.263924005" watchObservedRunningTime="2025-10-03 08:43:57.16842849 +0000 UTC m=+211.275913443" Oct 03 08:43:57 crc kubenswrapper[4899]: I1003 08:43:57.171343 4899 generic.go:334] "Generic (PLEG): container finished" podID="4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f" containerID="a51276e5016408f8784e77930f96f4e5211256a4bd5a4e6caf35dacbb52df685" exitCode=0 Oct 03 08:43:57 crc kubenswrapper[4899]: I1003 08:43:57.171409 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b99vh" event={"ID":"4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f","Type":"ContainerDied","Data":"a51276e5016408f8784e77930f96f4e5211256a4bd5a4e6caf35dacbb52df685"} Oct 03 08:43:57 crc kubenswrapper[4899]: I1003 08:43:57.177238 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnk22" event={"ID":"9a5429d2-4cf0-4d34-9bac-fee4008bd409","Type":"ContainerStarted","Data":"d49e510d11422e0f1ad65b8f50caff5ca151a9396d8e4c1c7cf502a8f2d86798"} Oct 03 08:43:57 crc kubenswrapper[4899]: I1003 08:43:57.200365 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9zxwg" podStartSLOduration=3.522714891 podStartE2EDuration="1m4.200349852s" podCreationTimestamp="2025-10-03 08:42:53 +0000 UTC" firstStartedPulling="2025-10-03 08:42:54.502103723 +0000 UTC m=+148.609588676" lastFinishedPulling="2025-10-03 08:43:55.179738684 +0000 UTC m=+209.287223637" observedRunningTime="2025-10-03 08:43:57.198316287 +0000 UTC m=+211.305801240" watchObservedRunningTime="2025-10-03 08:43:57.200349852 +0000 UTC m=+211.307834805" Oct 03 08:43:58 crc kubenswrapper[4899]: I1003 08:43:58.187310 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4lpnw" event={"ID":"ad6ce9b2-60bf-492e-b583-9147a171e7bd","Type":"ContainerStarted","Data":"3ac769f0fe0af8c0ab61b0681817208913f41f014c16bd1e93edaa2458a1118a"} Oct 03 08:43:58 crc kubenswrapper[4899]: I1003 08:43:58.190776 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9vg4" event={"ID":"ea22580f-8ebf-46dc-adc0-c17174f4a096","Type":"ContainerStarted","Data":"b172423fb70b0a84cba089fe12472b06848b7fef3840a4456ccc9adca7c9aa92"} Oct 03 08:43:58 crc kubenswrapper[4899]: I1003 08:43:58.193587 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b7jmv" event={"ID":"81f9d59c-b58a-4c3a-a9a8-7377c5d17273","Type":"ContainerStarted","Data":"030a4cb6a762f5664c645ddb1524b9538f4c4645487df288d7ec58833737e02b"} Oct 03 08:43:58 crc kubenswrapper[4899]: I1003 08:43:58.196127 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jtp4v" 
event={"ID":"49764d57-9b3d-4097-a2f4-08a363c6a25f","Type":"ContainerStarted","Data":"f9f7c681a07a7b83ef38983939eed4f7e6d396db27d02184a12f450f6632573c"} Oct 03 08:43:58 crc kubenswrapper[4899]: I1003 08:43:58.198601 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b99vh" event={"ID":"4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f","Type":"ContainerStarted","Data":"7cd91724e17915e17f3f40b9b7fc0c33fbf64850da730fd8ed9ece3263eead8c"} Oct 03 08:43:58 crc kubenswrapper[4899]: I1003 08:43:58.200969 4899 generic.go:334] "Generic (PLEG): container finished" podID="9a5429d2-4cf0-4d34-9bac-fee4008bd409" containerID="d49e510d11422e0f1ad65b8f50caff5ca151a9396d8e4c1c7cf502a8f2d86798" exitCode=0 Oct 03 08:43:58 crc kubenswrapper[4899]: I1003 08:43:58.200991 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnk22" event={"ID":"9a5429d2-4cf0-4d34-9bac-fee4008bd409","Type":"ContainerDied","Data":"d49e510d11422e0f1ad65b8f50caff5ca151a9396d8e4c1c7cf502a8f2d86798"} Oct 03 08:43:58 crc kubenswrapper[4899]: I1003 08:43:58.208087 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4lpnw" podStartSLOduration=1.844866416 podStartE2EDuration="1m4.208072644s" podCreationTimestamp="2025-10-03 08:42:54 +0000 UTC" firstStartedPulling="2025-10-03 08:42:55.584540631 +0000 UTC m=+149.692025584" lastFinishedPulling="2025-10-03 08:43:57.947746859 +0000 UTC m=+212.055231812" observedRunningTime="2025-10-03 08:43:58.206281546 +0000 UTC m=+212.313766499" watchObservedRunningTime="2025-10-03 08:43:58.208072644 +0000 UTC m=+212.315557597" Oct 03 08:43:58 crc kubenswrapper[4899]: I1003 08:43:58.232732 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-b99vh" podStartSLOduration=3.106871068 podStartE2EDuration="1m3.232717384s" podCreationTimestamp="2025-10-03 08:42:55 +0000 UTC" firstStartedPulling="2025-10-03 08:42:57.779106547 +0000 UTC m=+151.886591500" lastFinishedPulling="2025-10-03 08:43:57.904952863 +0000 UTC m=+212.012437816" observedRunningTime="2025-10-03 08:43:58.228362967 +0000 UTC m=+212.335847920" watchObservedRunningTime="2025-10-03 08:43:58.232717384 +0000 UTC m=+212.340202337" Oct 03 08:43:58 crc kubenswrapper[4899]: I1003 08:43:58.281561 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jtp4v" podStartSLOduration=2.995935453 podStartE2EDuration="1m6.281543503s" podCreationTimestamp="2025-10-03 08:42:52 +0000 UTC" firstStartedPulling="2025-10-03 08:42:54.499509922 +0000 UTC m=+148.606994875" lastFinishedPulling="2025-10-03 08:43:57.785117972 +0000 UTC m=+211.892602925" observedRunningTime="2025-10-03 08:43:58.279031393 +0000 UTC m=+212.386516336" watchObservedRunningTime="2025-10-03 08:43:58.281543503 +0000 UTC m=+212.389028456" Oct 03 08:43:58 crc kubenswrapper[4899]: I1003 08:43:58.333202 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-n9vg4" podStartSLOduration=2.981480067 podStartE2EDuration="1m6.333173799s" podCreationTimestamp="2025-10-03 08:42:52 +0000 UTC" firstStartedPulling="2025-10-03 08:42:54.508087108 +0000 UTC m=+148.615572061" lastFinishedPulling="2025-10-03 08:43:57.85978083 +0000 UTC m=+211.967265793" observedRunningTime="2025-10-03 08:43:58.311966648 +0000 UTC m=+212.419451601" watchObservedRunningTime="2025-10-03 08:43:58.333173799 +0000 
UTC m=+212.440658752" Oct 03 08:43:58 crc kubenswrapper[4899]: I1003 08:43:58.333340 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-b7jmv" podStartSLOduration=2.18270758 podStartE2EDuration="1m3.333334475s" podCreationTimestamp="2025-10-03 08:42:55 +0000 UTC" firstStartedPulling="2025-10-03 08:42:56.613710619 +0000 UTC m=+150.721195572" lastFinishedPulling="2025-10-03 08:43:57.764337524 +0000 UTC m=+211.871822467" observedRunningTime="2025-10-03 08:43:58.331139805 +0000 UTC m=+212.438624758" watchObservedRunningTime="2025-10-03 08:43:58.333334475 +0000 UTC m=+212.440819428" Oct 03 08:44:00 crc kubenswrapper[4899]: I1003 08:44:00.216356 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnk22" event={"ID":"9a5429d2-4cf0-4d34-9bac-fee4008bd409","Type":"ContainerStarted","Data":"3099674e5899f2afab27131de867dbe8e9c4097ae518db85bbe5579b94a9d058"} Oct 03 08:44:00 crc kubenswrapper[4899]: I1003 08:44:00.234253 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tnk22" podStartSLOduration=2.738180674 podStartE2EDuration="1m4.234232297s" podCreationTimestamp="2025-10-03 08:42:56 +0000 UTC" firstStartedPulling="2025-10-03 08:42:57.698291392 +0000 UTC m=+151.805776345" lastFinishedPulling="2025-10-03 08:43:59.194343015 +0000 UTC m=+213.301827968" observedRunningTime="2025-10-03 08:44:00.231914383 +0000 UTC m=+214.339399336" watchObservedRunningTime="2025-10-03 08:44:00.234232297 +0000 UTC m=+214.341717250" Oct 03 08:44:02 crc kubenswrapper[4899]: I1003 08:44:02.853990 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jtp4v" Oct 03 08:44:02 crc kubenswrapper[4899]: I1003 08:44:02.854322 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jtp4v" Oct 03 08:44:03 crc kubenswrapper[4899]: I1003 08:44:03.018828 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5xplz" Oct 03 08:44:03 crc kubenswrapper[4899]: I1003 08:44:03.019763 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5xplz" Oct 03 08:44:03 crc kubenswrapper[4899]: I1003 08:44:03.257182 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-n9vg4" Oct 03 08:44:03 crc kubenswrapper[4899]: I1003 08:44:03.257240 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-n9vg4" Oct 03 08:44:03 crc kubenswrapper[4899]: I1003 08:44:03.430071 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9zxwg" Oct 03 08:44:03 crc kubenswrapper[4899]: I1003 08:44:03.430131 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9zxwg" Oct 03 08:44:03 crc kubenswrapper[4899]: I1003 08:44:03.916509 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-n9vg4" Oct 03 08:44:03 crc kubenswrapper[4899]: I1003 08:44:03.916656 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9zxwg" Oct 03 08:44:03 crc kubenswrapper[4899]: I1003 
08:44:03.916852 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jtp4v" Oct 03 08:44:03 crc kubenswrapper[4899]: I1003 08:44:03.919146 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5xplz" Oct 03 08:44:03 crc kubenswrapper[4899]: I1003 08:44:03.957018 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5xplz" Oct 03 08:44:03 crc kubenswrapper[4899]: I1003 08:44:03.961351 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jtp4v" Oct 03 08:44:04 crc kubenswrapper[4899]: I1003 08:44:04.283440 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9zxwg" Oct 03 08:44:04 crc kubenswrapper[4899]: I1003 08:44:04.292592 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-n9vg4" Oct 03 08:44:04 crc kubenswrapper[4899]: I1003 08:44:04.765584 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n9vg4"] Oct 03 08:44:05 crc kubenswrapper[4899]: I1003 08:44:05.012477 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4lpnw" Oct 03 08:44:05 crc kubenswrapper[4899]: I1003 08:44:05.012547 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4lpnw" Oct 03 08:44:05 crc kubenswrapper[4899]: I1003 08:44:05.057953 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4lpnw" Oct 03 08:44:05 crc kubenswrapper[4899]: I1003 08:44:05.282758 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4lpnw" Oct 03 08:44:05 crc kubenswrapper[4899]: I1003 08:44:05.434370 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-b7jmv" Oct 03 08:44:05 crc kubenswrapper[4899]: I1003 08:44:05.434687 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-b7jmv" Oct 03 08:44:05 crc kubenswrapper[4899]: I1003 08:44:05.483965 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-b7jmv" Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.040290 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-b99vh" Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.040388 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-b99vh" Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.093075 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-b99vh" Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.161706 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9zxwg"] Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.243259 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-n9vg4" 
podUID="ea22580f-8ebf-46dc-adc0-c17174f4a096" containerName="registry-server" containerID="cri-o://b172423fb70b0a84cba089fe12472b06848b7fef3840a4456ccc9adca7c9aa92" gracePeriod=2 Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.243718 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9zxwg" podUID="d5013bd4-157b-4e1b-b1e7-df61922402cc" containerName="registry-server" containerID="cri-o://e12a01d59c05bb0dcd693b55b3418834d8539684cb22e9b7745d36efc47f6767" gracePeriod=2 Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.286454 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-b99vh" Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.287011 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-b7jmv" Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.494937 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tnk22" Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.495410 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tnk22" Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.546719 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tnk22" Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.619671 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n9vg4" Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.623819 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9zxwg" Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.668804 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea22580f-8ebf-46dc-adc0-c17174f4a096-utilities\") pod \"ea22580f-8ebf-46dc-adc0-c17174f4a096\" (UID: \"ea22580f-8ebf-46dc-adc0-c17174f4a096\") " Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.668854 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rb68l\" (UniqueName: \"kubernetes.io/projected/d5013bd4-157b-4e1b-b1e7-df61922402cc-kube-api-access-rb68l\") pod \"d5013bd4-157b-4e1b-b1e7-df61922402cc\" (UID: \"d5013bd4-157b-4e1b-b1e7-df61922402cc\") " Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.668885 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea22580f-8ebf-46dc-adc0-c17174f4a096-catalog-content\") pod \"ea22580f-8ebf-46dc-adc0-c17174f4a096\" (UID: \"ea22580f-8ebf-46dc-adc0-c17174f4a096\") " Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.668919 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5013bd4-157b-4e1b-b1e7-df61922402cc-utilities\") pod \"d5013bd4-157b-4e1b-b1e7-df61922402cc\" (UID: \"d5013bd4-157b-4e1b-b1e7-df61922402cc\") " Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.668967 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vht5n\" (UniqueName: \"kubernetes.io/projected/ea22580f-8ebf-46dc-adc0-c17174f4a096-kube-api-access-vht5n\") pod \"ea22580f-8ebf-46dc-adc0-c17174f4a096\" (UID: \"ea22580f-8ebf-46dc-adc0-c17174f4a096\") " Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.669027 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5013bd4-157b-4e1b-b1e7-df61922402cc-catalog-content\") pod \"d5013bd4-157b-4e1b-b1e7-df61922402cc\" (UID: \"d5013bd4-157b-4e1b-b1e7-df61922402cc\") " Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.669638 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea22580f-8ebf-46dc-adc0-c17174f4a096-utilities" (OuterVolumeSpecName: "utilities") pod "ea22580f-8ebf-46dc-adc0-c17174f4a096" (UID: "ea22580f-8ebf-46dc-adc0-c17174f4a096"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.677865 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d5013bd4-157b-4e1b-b1e7-df61922402cc-utilities" (OuterVolumeSpecName: "utilities") pod "d5013bd4-157b-4e1b-b1e7-df61922402cc" (UID: "d5013bd4-157b-4e1b-b1e7-df61922402cc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.683199 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea22580f-8ebf-46dc-adc0-c17174f4a096-kube-api-access-vht5n" (OuterVolumeSpecName: "kube-api-access-vht5n") pod "ea22580f-8ebf-46dc-adc0-c17174f4a096" (UID: "ea22580f-8ebf-46dc-adc0-c17174f4a096"). InnerVolumeSpecName "kube-api-access-vht5n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.698301 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5013bd4-157b-4e1b-b1e7-df61922402cc-kube-api-access-rb68l" (OuterVolumeSpecName: "kube-api-access-rb68l") pod "d5013bd4-157b-4e1b-b1e7-df61922402cc" (UID: "d5013bd4-157b-4e1b-b1e7-df61922402cc"). InnerVolumeSpecName "kube-api-access-rb68l". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.716992 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d5013bd4-157b-4e1b-b1e7-df61922402cc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d5013bd4-157b-4e1b-b1e7-df61922402cc" (UID: "d5013bd4-157b-4e1b-b1e7-df61922402cc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.736699 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea22580f-8ebf-46dc-adc0-c17174f4a096-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ea22580f-8ebf-46dc-adc0-c17174f4a096" (UID: "ea22580f-8ebf-46dc-adc0-c17174f4a096"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.770074 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5013bd4-157b-4e1b-b1e7-df61922402cc-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.770121 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea22580f-8ebf-46dc-adc0-c17174f4a096-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.770134 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rb68l\" (UniqueName: \"kubernetes.io/projected/d5013bd4-157b-4e1b-b1e7-df61922402cc-kube-api-access-rb68l\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.770149 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea22580f-8ebf-46dc-adc0-c17174f4a096-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.770160 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5013bd4-157b-4e1b-b1e7-df61922402cc-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.770173 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vht5n\" (UniqueName: \"kubernetes.io/projected/ea22580f-8ebf-46dc-adc0-c17174f4a096-kube-api-access-vht5n\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:06 crc kubenswrapper[4899]: I1003 08:44:06.968726 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-tkpkx"] Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.250668 4899 generic.go:334] "Generic (PLEG): container finished" podID="d5013bd4-157b-4e1b-b1e7-df61922402cc" containerID="e12a01d59c05bb0dcd693b55b3418834d8539684cb22e9b7745d36efc47f6767" exitCode=0 Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.251037 4899 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9zxwg" event={"ID":"d5013bd4-157b-4e1b-b1e7-df61922402cc","Type":"ContainerDied","Data":"e12a01d59c05bb0dcd693b55b3418834d8539684cb22e9b7745d36efc47f6767"} Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.251100 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9zxwg" event={"ID":"d5013bd4-157b-4e1b-b1e7-df61922402cc","Type":"ContainerDied","Data":"2577889c44cc7511e5b456057423e89e37eb8eb6ad18ff61b156772fd62c9a3b"} Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.251124 4899 scope.go:117] "RemoveContainer" containerID="e12a01d59c05bb0dcd693b55b3418834d8539684cb22e9b7745d36efc47f6767" Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.251329 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9zxwg" Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.256634 4899 generic.go:334] "Generic (PLEG): container finished" podID="ea22580f-8ebf-46dc-adc0-c17174f4a096" containerID="b172423fb70b0a84cba089fe12472b06848b7fef3840a4456ccc9adca7c9aa92" exitCode=0 Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.257701 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9vg4" event={"ID":"ea22580f-8ebf-46dc-adc0-c17174f4a096","Type":"ContainerDied","Data":"b172423fb70b0a84cba089fe12472b06848b7fef3840a4456ccc9adca7c9aa92"} Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.257757 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9vg4" event={"ID":"ea22580f-8ebf-46dc-adc0-c17174f4a096","Type":"ContainerDied","Data":"c55dc1d615753824e903f57104c46a9221bf368f62b96ca71ef00dc67fb9ac51"} Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.257842 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-n9vg4" Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.275054 4899 scope.go:117] "RemoveContainer" containerID="5665502aff33a58346f4214e4bedd8b0260291aa841641d28c515f0a78efef94" Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.302106 4899 scope.go:117] "RemoveContainer" containerID="94190176e53912947674ea3df2e1cb5d855caa22b75380923ec4473c72235a32" Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.303790 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tnk22" Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.318135 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n9vg4"] Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.323372 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-n9vg4"] Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.325626 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9zxwg"] Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.328617 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9zxwg"] Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.329811 4899 scope.go:117] "RemoveContainer" containerID="e12a01d59c05bb0dcd693b55b3418834d8539684cb22e9b7745d36efc47f6767" Oct 03 08:44:07 crc kubenswrapper[4899]: E1003 08:44:07.330331 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e12a01d59c05bb0dcd693b55b3418834d8539684cb22e9b7745d36efc47f6767\": container with ID starting with e12a01d59c05bb0dcd693b55b3418834d8539684cb22e9b7745d36efc47f6767 not found: ID does not exist" containerID="e12a01d59c05bb0dcd693b55b3418834d8539684cb22e9b7745d36efc47f6767" Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.330356 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e12a01d59c05bb0dcd693b55b3418834d8539684cb22e9b7745d36efc47f6767"} err="failed to get container status \"e12a01d59c05bb0dcd693b55b3418834d8539684cb22e9b7745d36efc47f6767\": rpc error: code = NotFound desc = could not find container \"e12a01d59c05bb0dcd693b55b3418834d8539684cb22e9b7745d36efc47f6767\": container with ID starting with e12a01d59c05bb0dcd693b55b3418834d8539684cb22e9b7745d36efc47f6767 not found: ID does not exist" Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.330393 4899 scope.go:117] "RemoveContainer" containerID="5665502aff33a58346f4214e4bedd8b0260291aa841641d28c515f0a78efef94" Oct 03 08:44:07 crc kubenswrapper[4899]: E1003 08:44:07.330651 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5665502aff33a58346f4214e4bedd8b0260291aa841641d28c515f0a78efef94\": container with ID starting with 5665502aff33a58346f4214e4bedd8b0260291aa841641d28c515f0a78efef94 not found: ID does not exist" containerID="5665502aff33a58346f4214e4bedd8b0260291aa841641d28c515f0a78efef94" Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.330696 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5665502aff33a58346f4214e4bedd8b0260291aa841641d28c515f0a78efef94"} err="failed to get container status \"5665502aff33a58346f4214e4bedd8b0260291aa841641d28c515f0a78efef94\": rpc error: code = 
NotFound desc = could not find container \"5665502aff33a58346f4214e4bedd8b0260291aa841641d28c515f0a78efef94\": container with ID starting with 5665502aff33a58346f4214e4bedd8b0260291aa841641d28c515f0a78efef94 not found: ID does not exist" Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.330727 4899 scope.go:117] "RemoveContainer" containerID="94190176e53912947674ea3df2e1cb5d855caa22b75380923ec4473c72235a32" Oct 03 08:44:07 crc kubenswrapper[4899]: E1003 08:44:07.331252 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94190176e53912947674ea3df2e1cb5d855caa22b75380923ec4473c72235a32\": container with ID starting with 94190176e53912947674ea3df2e1cb5d855caa22b75380923ec4473c72235a32 not found: ID does not exist" containerID="94190176e53912947674ea3df2e1cb5d855caa22b75380923ec4473c72235a32" Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.331274 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94190176e53912947674ea3df2e1cb5d855caa22b75380923ec4473c72235a32"} err="failed to get container status \"94190176e53912947674ea3df2e1cb5d855caa22b75380923ec4473c72235a32\": rpc error: code = NotFound desc = could not find container \"94190176e53912947674ea3df2e1cb5d855caa22b75380923ec4473c72235a32\": container with ID starting with 94190176e53912947674ea3df2e1cb5d855caa22b75380923ec4473c72235a32 not found: ID does not exist" Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.331290 4899 scope.go:117] "RemoveContainer" containerID="b172423fb70b0a84cba089fe12472b06848b7fef3840a4456ccc9adca7c9aa92" Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.346741 4899 scope.go:117] "RemoveContainer" containerID="2808ce19792e5de523a23d6d1137356f6dc1e583fe901e66902cf141be426ac4" Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.361162 4899 scope.go:117] "RemoveContainer" containerID="7fcb56b1f35756dbe1c702e923e168abd18461fb7607404a44ccc49fb889bbf0" Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.373964 4899 scope.go:117] "RemoveContainer" containerID="b172423fb70b0a84cba089fe12472b06848b7fef3840a4456ccc9adca7c9aa92" Oct 03 08:44:07 crc kubenswrapper[4899]: E1003 08:44:07.375211 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b172423fb70b0a84cba089fe12472b06848b7fef3840a4456ccc9adca7c9aa92\": container with ID starting with b172423fb70b0a84cba089fe12472b06848b7fef3840a4456ccc9adca7c9aa92 not found: ID does not exist" containerID="b172423fb70b0a84cba089fe12472b06848b7fef3840a4456ccc9adca7c9aa92" Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.375241 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b172423fb70b0a84cba089fe12472b06848b7fef3840a4456ccc9adca7c9aa92"} err="failed to get container status \"b172423fb70b0a84cba089fe12472b06848b7fef3840a4456ccc9adca7c9aa92\": rpc error: code = NotFound desc = could not find container \"b172423fb70b0a84cba089fe12472b06848b7fef3840a4456ccc9adca7c9aa92\": container with ID starting with b172423fb70b0a84cba089fe12472b06848b7fef3840a4456ccc9adca7c9aa92 not found: ID does not exist" Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.375265 4899 scope.go:117] "RemoveContainer" containerID="2808ce19792e5de523a23d6d1137356f6dc1e583fe901e66902cf141be426ac4" Oct 03 08:44:07 crc kubenswrapper[4899]: E1003 08:44:07.375498 4899 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"2808ce19792e5de523a23d6d1137356f6dc1e583fe901e66902cf141be426ac4\": container with ID starting with 2808ce19792e5de523a23d6d1137356f6dc1e583fe901e66902cf141be426ac4 not found: ID does not exist" containerID="2808ce19792e5de523a23d6d1137356f6dc1e583fe901e66902cf141be426ac4" Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.375527 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2808ce19792e5de523a23d6d1137356f6dc1e583fe901e66902cf141be426ac4"} err="failed to get container status \"2808ce19792e5de523a23d6d1137356f6dc1e583fe901e66902cf141be426ac4\": rpc error: code = NotFound desc = could not find container \"2808ce19792e5de523a23d6d1137356f6dc1e583fe901e66902cf141be426ac4\": container with ID starting with 2808ce19792e5de523a23d6d1137356f6dc1e583fe901e66902cf141be426ac4 not found: ID does not exist" Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.375547 4899 scope.go:117] "RemoveContainer" containerID="7fcb56b1f35756dbe1c702e923e168abd18461fb7607404a44ccc49fb889bbf0" Oct 03 08:44:07 crc kubenswrapper[4899]: E1003 08:44:07.375929 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7fcb56b1f35756dbe1c702e923e168abd18461fb7607404a44ccc49fb889bbf0\": container with ID starting with 7fcb56b1f35756dbe1c702e923e168abd18461fb7607404a44ccc49fb889bbf0 not found: ID does not exist" containerID="7fcb56b1f35756dbe1c702e923e168abd18461fb7607404a44ccc49fb889bbf0" Oct 03 08:44:07 crc kubenswrapper[4899]: I1003 08:44:07.375951 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7fcb56b1f35756dbe1c702e923e168abd18461fb7607404a44ccc49fb889bbf0"} err="failed to get container status \"7fcb56b1f35756dbe1c702e923e168abd18461fb7607404a44ccc49fb889bbf0\": rpc error: code = NotFound desc = could not find container \"7fcb56b1f35756dbe1c702e923e168abd18461fb7607404a44ccc49fb889bbf0\": container with ID starting with 7fcb56b1f35756dbe1c702e923e168abd18461fb7607404a44ccc49fb889bbf0 not found: ID does not exist" Oct 03 08:44:08 crc kubenswrapper[4899]: I1003 08:44:08.535213 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5013bd4-157b-4e1b-b1e7-df61922402cc" path="/var/lib/kubelet/pods/d5013bd4-157b-4e1b-b1e7-df61922402cc/volumes" Oct 03 08:44:08 crc kubenswrapper[4899]: I1003 08:44:08.536254 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea22580f-8ebf-46dc-adc0-c17174f4a096" path="/var/lib/kubelet/pods/ea22580f-8ebf-46dc-adc0-c17174f4a096/volumes" Oct 03 08:44:08 crc kubenswrapper[4899]: I1003 08:44:08.567903 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-b7jmv"] Oct 03 08:44:08 crc kubenswrapper[4899]: I1003 08:44:08.568237 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-b7jmv" podUID="81f9d59c-b58a-4c3a-a9a8-7377c5d17273" containerName="registry-server" containerID="cri-o://030a4cb6a762f5664c645ddb1524b9538f4c4645487df288d7ec58833737e02b" gracePeriod=2 Oct 03 08:44:08 crc kubenswrapper[4899]: I1003 08:44:08.949486 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b7jmv" Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.095587 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81f9d59c-b58a-4c3a-a9a8-7377c5d17273-utilities\") pod \"81f9d59c-b58a-4c3a-a9a8-7377c5d17273\" (UID: \"81f9d59c-b58a-4c3a-a9a8-7377c5d17273\") " Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.095636 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9lb6\" (UniqueName: \"kubernetes.io/projected/81f9d59c-b58a-4c3a-a9a8-7377c5d17273-kube-api-access-z9lb6\") pod \"81f9d59c-b58a-4c3a-a9a8-7377c5d17273\" (UID: \"81f9d59c-b58a-4c3a-a9a8-7377c5d17273\") " Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.095703 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81f9d59c-b58a-4c3a-a9a8-7377c5d17273-catalog-content\") pod \"81f9d59c-b58a-4c3a-a9a8-7377c5d17273\" (UID: \"81f9d59c-b58a-4c3a-a9a8-7377c5d17273\") " Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.096443 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81f9d59c-b58a-4c3a-a9a8-7377c5d17273-utilities" (OuterVolumeSpecName: "utilities") pod "81f9d59c-b58a-4c3a-a9a8-7377c5d17273" (UID: "81f9d59c-b58a-4c3a-a9a8-7377c5d17273"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.100665 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81f9d59c-b58a-4c3a-a9a8-7377c5d17273-kube-api-access-z9lb6" (OuterVolumeSpecName: "kube-api-access-z9lb6") pod "81f9d59c-b58a-4c3a-a9a8-7377c5d17273" (UID: "81f9d59c-b58a-4c3a-a9a8-7377c5d17273"). InnerVolumeSpecName "kube-api-access-z9lb6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.124050 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81f9d59c-b58a-4c3a-a9a8-7377c5d17273-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "81f9d59c-b58a-4c3a-a9a8-7377c5d17273" (UID: "81f9d59c-b58a-4c3a-a9a8-7377c5d17273"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.161855 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tnk22"] Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.197163 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81f9d59c-b58a-4c3a-a9a8-7377c5d17273-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.197206 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9lb6\" (UniqueName: \"kubernetes.io/projected/81f9d59c-b58a-4c3a-a9a8-7377c5d17273-kube-api-access-z9lb6\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.197217 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81f9d59c-b58a-4c3a-a9a8-7377c5d17273-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.272303 4899 generic.go:334] "Generic (PLEG): container finished" podID="81f9d59c-b58a-4c3a-a9a8-7377c5d17273" containerID="030a4cb6a762f5664c645ddb1524b9538f4c4645487df288d7ec58833737e02b" exitCode=0 Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.272377 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b7jmv" Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.272415 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b7jmv" event={"ID":"81f9d59c-b58a-4c3a-a9a8-7377c5d17273","Type":"ContainerDied","Data":"030a4cb6a762f5664c645ddb1524b9538f4c4645487df288d7ec58833737e02b"} Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.272452 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b7jmv" event={"ID":"81f9d59c-b58a-4c3a-a9a8-7377c5d17273","Type":"ContainerDied","Data":"e828ad7e0ef82087ebbc206026ff44fae9a8d426a21bd2ed12075cc2e75ace60"} Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.272474 4899 scope.go:117] "RemoveContainer" containerID="030a4cb6a762f5664c645ddb1524b9538f4c4645487df288d7ec58833737e02b" Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.272518 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tnk22" podUID="9a5429d2-4cf0-4d34-9bac-fee4008bd409" containerName="registry-server" containerID="cri-o://3099674e5899f2afab27131de867dbe8e9c4097ae518db85bbe5579b94a9d058" gracePeriod=2 Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.292230 4899 scope.go:117] "RemoveContainer" containerID="d7181ab5f2708e4e55bfc491c3e92a0cfff2d3488509ae69c2785cf85a380e4f" Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.301457 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-b7jmv"] Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.305017 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-b7jmv"] Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.344694 4899 scope.go:117] "RemoveContainer" containerID="56997bd971cb15f0ccd53568145f017557ad239fbec0724160dfa71cfa107d7c" Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.425110 4899 scope.go:117] "RemoveContainer" 
containerID="030a4cb6a762f5664c645ddb1524b9538f4c4645487df288d7ec58833737e02b" Oct 03 08:44:09 crc kubenswrapper[4899]: E1003 08:44:09.427139 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"030a4cb6a762f5664c645ddb1524b9538f4c4645487df288d7ec58833737e02b\": container with ID starting with 030a4cb6a762f5664c645ddb1524b9538f4c4645487df288d7ec58833737e02b not found: ID does not exist" containerID="030a4cb6a762f5664c645ddb1524b9538f4c4645487df288d7ec58833737e02b" Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.427185 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"030a4cb6a762f5664c645ddb1524b9538f4c4645487df288d7ec58833737e02b"} err="failed to get container status \"030a4cb6a762f5664c645ddb1524b9538f4c4645487df288d7ec58833737e02b\": rpc error: code = NotFound desc = could not find container \"030a4cb6a762f5664c645ddb1524b9538f4c4645487df288d7ec58833737e02b\": container with ID starting with 030a4cb6a762f5664c645ddb1524b9538f4c4645487df288d7ec58833737e02b not found: ID does not exist" Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.427218 4899 scope.go:117] "RemoveContainer" containerID="d7181ab5f2708e4e55bfc491c3e92a0cfff2d3488509ae69c2785cf85a380e4f" Oct 03 08:44:09 crc kubenswrapper[4899]: E1003 08:44:09.427587 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7181ab5f2708e4e55bfc491c3e92a0cfff2d3488509ae69c2785cf85a380e4f\": container with ID starting with d7181ab5f2708e4e55bfc491c3e92a0cfff2d3488509ae69c2785cf85a380e4f not found: ID does not exist" containerID="d7181ab5f2708e4e55bfc491c3e92a0cfff2d3488509ae69c2785cf85a380e4f" Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.427618 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7181ab5f2708e4e55bfc491c3e92a0cfff2d3488509ae69c2785cf85a380e4f"} err="failed to get container status \"d7181ab5f2708e4e55bfc491c3e92a0cfff2d3488509ae69c2785cf85a380e4f\": rpc error: code = NotFound desc = could not find container \"d7181ab5f2708e4e55bfc491c3e92a0cfff2d3488509ae69c2785cf85a380e4f\": container with ID starting with d7181ab5f2708e4e55bfc491c3e92a0cfff2d3488509ae69c2785cf85a380e4f not found: ID does not exist" Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.427640 4899 scope.go:117] "RemoveContainer" containerID="56997bd971cb15f0ccd53568145f017557ad239fbec0724160dfa71cfa107d7c" Oct 03 08:44:09 crc kubenswrapper[4899]: E1003 08:44:09.428129 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56997bd971cb15f0ccd53568145f017557ad239fbec0724160dfa71cfa107d7c\": container with ID starting with 56997bd971cb15f0ccd53568145f017557ad239fbec0724160dfa71cfa107d7c not found: ID does not exist" containerID="56997bd971cb15f0ccd53568145f017557ad239fbec0724160dfa71cfa107d7c" Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.428173 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56997bd971cb15f0ccd53568145f017557ad239fbec0724160dfa71cfa107d7c"} err="failed to get container status \"56997bd971cb15f0ccd53568145f017557ad239fbec0724160dfa71cfa107d7c\": rpc error: code = NotFound desc = could not find container \"56997bd971cb15f0ccd53568145f017557ad239fbec0724160dfa71cfa107d7c\": container with ID starting with 
56997bd971cb15f0ccd53568145f017557ad239fbec0724160dfa71cfa107d7c not found: ID does not exist" Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.550284 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tnk22" Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.704977 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rjjrt\" (UniqueName: \"kubernetes.io/projected/9a5429d2-4cf0-4d34-9bac-fee4008bd409-kube-api-access-rjjrt\") pod \"9a5429d2-4cf0-4d34-9bac-fee4008bd409\" (UID: \"9a5429d2-4cf0-4d34-9bac-fee4008bd409\") " Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.705019 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a5429d2-4cf0-4d34-9bac-fee4008bd409-catalog-content\") pod \"9a5429d2-4cf0-4d34-9bac-fee4008bd409\" (UID: \"9a5429d2-4cf0-4d34-9bac-fee4008bd409\") " Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.705122 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a5429d2-4cf0-4d34-9bac-fee4008bd409-utilities\") pod \"9a5429d2-4cf0-4d34-9bac-fee4008bd409\" (UID: \"9a5429d2-4cf0-4d34-9bac-fee4008bd409\") " Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.705959 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a5429d2-4cf0-4d34-9bac-fee4008bd409-utilities" (OuterVolumeSpecName: "utilities") pod "9a5429d2-4cf0-4d34-9bac-fee4008bd409" (UID: "9a5429d2-4cf0-4d34-9bac-fee4008bd409"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.710101 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a5429d2-4cf0-4d34-9bac-fee4008bd409-kube-api-access-rjjrt" (OuterVolumeSpecName: "kube-api-access-rjjrt") pod "9a5429d2-4cf0-4d34-9bac-fee4008bd409" (UID: "9a5429d2-4cf0-4d34-9bac-fee4008bd409"). InnerVolumeSpecName "kube-api-access-rjjrt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.807121 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rjjrt\" (UniqueName: \"kubernetes.io/projected/9a5429d2-4cf0-4d34-9bac-fee4008bd409-kube-api-access-rjjrt\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.807184 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a5429d2-4cf0-4d34-9bac-fee4008bd409-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.809431 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a5429d2-4cf0-4d34-9bac-fee4008bd409-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9a5429d2-4cf0-4d34-9bac-fee4008bd409" (UID: "9a5429d2-4cf0-4d34-9bac-fee4008bd409"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:44:09 crc kubenswrapper[4899]: I1003 08:44:09.908317 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a5429d2-4cf0-4d34-9bac-fee4008bd409-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:10 crc kubenswrapper[4899]: I1003 08:44:10.280021 4899 generic.go:334] "Generic (PLEG): container finished" podID="9a5429d2-4cf0-4d34-9bac-fee4008bd409" containerID="3099674e5899f2afab27131de867dbe8e9c4097ae518db85bbe5579b94a9d058" exitCode=0 Oct 03 08:44:10 crc kubenswrapper[4899]: I1003 08:44:10.280073 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnk22" event={"ID":"9a5429d2-4cf0-4d34-9bac-fee4008bd409","Type":"ContainerDied","Data":"3099674e5899f2afab27131de867dbe8e9c4097ae518db85bbe5579b94a9d058"} Oct 03 08:44:10 crc kubenswrapper[4899]: I1003 08:44:10.280110 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnk22" event={"ID":"9a5429d2-4cf0-4d34-9bac-fee4008bd409","Type":"ContainerDied","Data":"411aaf22d20388fdf35ebbed6638b0feb5794b441f1e19cc6b1672f8b89c82cd"} Oct 03 08:44:10 crc kubenswrapper[4899]: I1003 08:44:10.280080 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tnk22" Oct 03 08:44:10 crc kubenswrapper[4899]: I1003 08:44:10.280134 4899 scope.go:117] "RemoveContainer" containerID="3099674e5899f2afab27131de867dbe8e9c4097ae518db85bbe5579b94a9d058" Oct 03 08:44:10 crc kubenswrapper[4899]: I1003 08:44:10.297130 4899 scope.go:117] "RemoveContainer" containerID="d49e510d11422e0f1ad65b8f50caff5ca151a9396d8e4c1c7cf502a8f2d86798" Oct 03 08:44:10 crc kubenswrapper[4899]: I1003 08:44:10.309242 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tnk22"] Oct 03 08:44:10 crc kubenswrapper[4899]: I1003 08:44:10.312647 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tnk22"] Oct 03 08:44:10 crc kubenswrapper[4899]: I1003 08:44:10.323414 4899 scope.go:117] "RemoveContainer" containerID="e18e88fcd01b1eebbc5191d126ed4f3f7113c9d482bb45d162509b54aee9e802" Oct 03 08:44:10 crc kubenswrapper[4899]: I1003 08:44:10.337920 4899 scope.go:117] "RemoveContainer" containerID="3099674e5899f2afab27131de867dbe8e9c4097ae518db85bbe5579b94a9d058" Oct 03 08:44:10 crc kubenswrapper[4899]: E1003 08:44:10.338435 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3099674e5899f2afab27131de867dbe8e9c4097ae518db85bbe5579b94a9d058\": container with ID starting with 3099674e5899f2afab27131de867dbe8e9c4097ae518db85bbe5579b94a9d058 not found: ID does not exist" containerID="3099674e5899f2afab27131de867dbe8e9c4097ae518db85bbe5579b94a9d058" Oct 03 08:44:10 crc kubenswrapper[4899]: I1003 08:44:10.338477 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3099674e5899f2afab27131de867dbe8e9c4097ae518db85bbe5579b94a9d058"} err="failed to get container status \"3099674e5899f2afab27131de867dbe8e9c4097ae518db85bbe5579b94a9d058\": rpc error: code = NotFound desc = could not find container \"3099674e5899f2afab27131de867dbe8e9c4097ae518db85bbe5579b94a9d058\": container with ID starting with 3099674e5899f2afab27131de867dbe8e9c4097ae518db85bbe5579b94a9d058 not found: ID does not exist" Oct 03 08:44:10 crc 
kubenswrapper[4899]: I1003 08:44:10.338509 4899 scope.go:117] "RemoveContainer" containerID="d49e510d11422e0f1ad65b8f50caff5ca151a9396d8e4c1c7cf502a8f2d86798" Oct 03 08:44:10 crc kubenswrapper[4899]: E1003 08:44:10.338822 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d49e510d11422e0f1ad65b8f50caff5ca151a9396d8e4c1c7cf502a8f2d86798\": container with ID starting with d49e510d11422e0f1ad65b8f50caff5ca151a9396d8e4c1c7cf502a8f2d86798 not found: ID does not exist" containerID="d49e510d11422e0f1ad65b8f50caff5ca151a9396d8e4c1c7cf502a8f2d86798" Oct 03 08:44:10 crc kubenswrapper[4899]: I1003 08:44:10.338857 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d49e510d11422e0f1ad65b8f50caff5ca151a9396d8e4c1c7cf502a8f2d86798"} err="failed to get container status \"d49e510d11422e0f1ad65b8f50caff5ca151a9396d8e4c1c7cf502a8f2d86798\": rpc error: code = NotFound desc = could not find container \"d49e510d11422e0f1ad65b8f50caff5ca151a9396d8e4c1c7cf502a8f2d86798\": container with ID starting with d49e510d11422e0f1ad65b8f50caff5ca151a9396d8e4c1c7cf502a8f2d86798 not found: ID does not exist" Oct 03 08:44:10 crc kubenswrapper[4899]: I1003 08:44:10.338878 4899 scope.go:117] "RemoveContainer" containerID="e18e88fcd01b1eebbc5191d126ed4f3f7113c9d482bb45d162509b54aee9e802" Oct 03 08:44:10 crc kubenswrapper[4899]: E1003 08:44:10.339406 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e18e88fcd01b1eebbc5191d126ed4f3f7113c9d482bb45d162509b54aee9e802\": container with ID starting with e18e88fcd01b1eebbc5191d126ed4f3f7113c9d482bb45d162509b54aee9e802 not found: ID does not exist" containerID="e18e88fcd01b1eebbc5191d126ed4f3f7113c9d482bb45d162509b54aee9e802" Oct 03 08:44:10 crc kubenswrapper[4899]: I1003 08:44:10.339454 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e18e88fcd01b1eebbc5191d126ed4f3f7113c9d482bb45d162509b54aee9e802"} err="failed to get container status \"e18e88fcd01b1eebbc5191d126ed4f3f7113c9d482bb45d162509b54aee9e802\": rpc error: code = NotFound desc = could not find container \"e18e88fcd01b1eebbc5191d126ed4f3f7113c9d482bb45d162509b54aee9e802\": container with ID starting with e18e88fcd01b1eebbc5191d126ed4f3f7113c9d482bb45d162509b54aee9e802 not found: ID does not exist" Oct 03 08:44:10 crc kubenswrapper[4899]: I1003 08:44:10.543775 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81f9d59c-b58a-4c3a-a9a8-7377c5d17273" path="/var/lib/kubelet/pods/81f9d59c-b58a-4c3a-a9a8-7377c5d17273/volumes" Oct 03 08:44:10 crc kubenswrapper[4899]: I1003 08:44:10.544562 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a5429d2-4cf0-4d34-9bac-fee4008bd409" path="/var/lib/kubelet/pods/9a5429d2-4cf0-4d34-9bac-fee4008bd409/volumes" Oct 03 08:44:12 crc kubenswrapper[4899]: I1003 08:44:12.198432 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 08:44:12 crc kubenswrapper[4899]: I1003 08:44:12.198787 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 08:44:12 crc kubenswrapper[4899]: I1003 08:44:12.198849 4899 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 08:44:12 crc kubenswrapper[4899]: I1003 08:44:12.199539 4899 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395"} pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 08:44:12 crc kubenswrapper[4899]: I1003 08:44:12.199588 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" containerID="cri-o://1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395" gracePeriod=600 Oct 03 08:44:13 crc kubenswrapper[4899]: I1003 08:44:13.297782 4899 generic.go:334] "Generic (PLEG): container finished" podID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerID="1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395" exitCode=0 Oct 03 08:44:13 crc kubenswrapper[4899]: I1003 08:44:13.297860 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerDied","Data":"1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395"} Oct 03 08:44:13 crc kubenswrapper[4899]: I1003 08:44:13.298398 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerStarted","Data":"c575796f37c9e9cc7bfd1aa952849ac3387d60b1b8fe207a0308043edd984a52"} Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.023020 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" podUID="c459aabe-55b9-415c-8782-8a112e9ea466" containerName="oauth-openshift" containerID="cri-o://4e6f8a44b8364e344397f6d175bbd2accdfcda13496ae580a8fa62433e8cc05d" gracePeriod=15 Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.378670 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.392304 4899 generic.go:334] "Generic (PLEG): container finished" podID="c459aabe-55b9-415c-8782-8a112e9ea466" containerID="4e6f8a44b8364e344397f6d175bbd2accdfcda13496ae580a8fa62433e8cc05d" exitCode=0 Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.392376 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" event={"ID":"c459aabe-55b9-415c-8782-8a112e9ea466","Type":"ContainerDied","Data":"4e6f8a44b8364e344397f6d175bbd2accdfcda13496ae580a8fa62433e8cc05d"} Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.392416 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.392461 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-tkpkx" event={"ID":"c459aabe-55b9-415c-8782-8a112e9ea466","Type":"ContainerDied","Data":"f50b6bf7840fa270fc68e98f93b640126527b36734ba5fc86f7c3f169efa29a0"} Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.392483 4899 scope.go:117] "RemoveContainer" containerID="4e6f8a44b8364e344397f6d175bbd2accdfcda13496ae580a8fa62433e8cc05d" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.409313 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-7cf78455b6-vw4xq"] Oct 03 08:44:32 crc kubenswrapper[4899]: E1003 08:44:32.409526 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea22580f-8ebf-46dc-adc0-c17174f4a096" containerName="registry-server" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.409538 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea22580f-8ebf-46dc-adc0-c17174f4a096" containerName="registry-server" Oct 03 08:44:32 crc kubenswrapper[4899]: E1003 08:44:32.409549 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c459aabe-55b9-415c-8782-8a112e9ea466" containerName="oauth-openshift" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.409555 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="c459aabe-55b9-415c-8782-8a112e9ea466" containerName="oauth-openshift" Oct 03 08:44:32 crc kubenswrapper[4899]: E1003 08:44:32.409562 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5013bd4-157b-4e1b-b1e7-df61922402cc" containerName="registry-server" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.409569 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5013bd4-157b-4e1b-b1e7-df61922402cc" containerName="registry-server" Oct 03 08:44:32 crc kubenswrapper[4899]: E1003 08:44:32.409579 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81f9d59c-b58a-4c3a-a9a8-7377c5d17273" containerName="registry-server" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.409585 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="81f9d59c-b58a-4c3a-a9a8-7377c5d17273" containerName="registry-server" Oct 03 08:44:32 crc kubenswrapper[4899]: E1003 08:44:32.409593 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="321b82b3-0aa0-409d-8a8a-8ea91085f407" containerName="pruner" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.409600 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="321b82b3-0aa0-409d-8a8a-8ea91085f407" containerName="pruner" Oct 03 08:44:32 crc kubenswrapper[4899]: E1003 08:44:32.409610 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="822debfe-b242-48f7-82f3-5f35d3285775" containerName="pruner" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.409615 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="822debfe-b242-48f7-82f3-5f35d3285775" containerName="pruner" Oct 03 08:44:32 crc kubenswrapper[4899]: E1003 08:44:32.409624 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5013bd4-157b-4e1b-b1e7-df61922402cc" containerName="extract-utilities" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.409630 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5013bd4-157b-4e1b-b1e7-df61922402cc" containerName="extract-utilities" Oct 03 08:44:32 crc 
kubenswrapper[4899]: E1003 08:44:32.409641 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea22580f-8ebf-46dc-adc0-c17174f4a096" containerName="extract-content" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.409647 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea22580f-8ebf-46dc-adc0-c17174f4a096" containerName="extract-content" Oct 03 08:44:32 crc kubenswrapper[4899]: E1003 08:44:32.409655 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a5429d2-4cf0-4d34-9bac-fee4008bd409" containerName="extract-utilities" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.409660 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a5429d2-4cf0-4d34-9bac-fee4008bd409" containerName="extract-utilities" Oct 03 08:44:32 crc kubenswrapper[4899]: E1003 08:44:32.409672 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81f9d59c-b58a-4c3a-a9a8-7377c5d17273" containerName="extract-content" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.409678 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="81f9d59c-b58a-4c3a-a9a8-7377c5d17273" containerName="extract-content" Oct 03 08:44:32 crc kubenswrapper[4899]: E1003 08:44:32.409687 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a5429d2-4cf0-4d34-9bac-fee4008bd409" containerName="registry-server" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.409692 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a5429d2-4cf0-4d34-9bac-fee4008bd409" containerName="registry-server" Oct 03 08:44:32 crc kubenswrapper[4899]: E1003 08:44:32.409699 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5013bd4-157b-4e1b-b1e7-df61922402cc" containerName="extract-content" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.409704 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5013bd4-157b-4e1b-b1e7-df61922402cc" containerName="extract-content" Oct 03 08:44:32 crc kubenswrapper[4899]: E1003 08:44:32.409713 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea22580f-8ebf-46dc-adc0-c17174f4a096" containerName="extract-utilities" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.409718 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea22580f-8ebf-46dc-adc0-c17174f4a096" containerName="extract-utilities" Oct 03 08:44:32 crc kubenswrapper[4899]: E1003 08:44:32.409725 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81f9d59c-b58a-4c3a-a9a8-7377c5d17273" containerName="extract-utilities" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.409732 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="81f9d59c-b58a-4c3a-a9a8-7377c5d17273" containerName="extract-utilities" Oct 03 08:44:32 crc kubenswrapper[4899]: E1003 08:44:32.409741 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a5429d2-4cf0-4d34-9bac-fee4008bd409" containerName="extract-content" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.409746 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a5429d2-4cf0-4d34-9bac-fee4008bd409" containerName="extract-content" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.409917 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="822debfe-b242-48f7-82f3-5f35d3285775" containerName="pruner" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.409927 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea22580f-8ebf-46dc-adc0-c17174f4a096" 
containerName="registry-server" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.409935 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="c459aabe-55b9-415c-8782-8a112e9ea466" containerName="oauth-openshift" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.409943 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="81f9d59c-b58a-4c3a-a9a8-7377c5d17273" containerName="registry-server" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.409950 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5013bd4-157b-4e1b-b1e7-df61922402cc" containerName="registry-server" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.409961 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a5429d2-4cf0-4d34-9bac-fee4008bd409" containerName="registry-server" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.409969 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="321b82b3-0aa0-409d-8a8a-8ea91085f407" containerName="pruner" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.410416 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.421862 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7cf78455b6-vw4xq"] Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.437426 4899 scope.go:117] "RemoveContainer" containerID="4e6f8a44b8364e344397f6d175bbd2accdfcda13496ae580a8fa62433e8cc05d" Oct 03 08:44:32 crc kubenswrapper[4899]: E1003 08:44:32.438000 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e6f8a44b8364e344397f6d175bbd2accdfcda13496ae580a8fa62433e8cc05d\": container with ID starting with 4e6f8a44b8364e344397f6d175bbd2accdfcda13496ae580a8fa62433e8cc05d not found: ID does not exist" containerID="4e6f8a44b8364e344397f6d175bbd2accdfcda13496ae580a8fa62433e8cc05d" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.438058 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e6f8a44b8364e344397f6d175bbd2accdfcda13496ae580a8fa62433e8cc05d"} err="failed to get container status \"4e6f8a44b8364e344397f6d175bbd2accdfcda13496ae580a8fa62433e8cc05d\": rpc error: code = NotFound desc = could not find container \"4e6f8a44b8364e344397f6d175bbd2accdfcda13496ae580a8fa62433e8cc05d\": container with ID starting with 4e6f8a44b8364e344397f6d175bbd2accdfcda13496ae580a8fa62433e8cc05d not found: ID does not exist" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.476825 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-ocp-branding-template\") pod \"c459aabe-55b9-415c-8782-8a112e9ea466\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.476873 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-router-certs\") pod \"c459aabe-55b9-415c-8782-8a112e9ea466\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.476904 4899 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-serving-cert\") pod \"c459aabe-55b9-415c-8782-8a112e9ea466\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.476922 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c459aabe-55b9-415c-8782-8a112e9ea466-audit-policies\") pod \"c459aabe-55b9-415c-8782-8a112e9ea466\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.476943 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-user-template-error\") pod \"c459aabe-55b9-415c-8782-8a112e9ea466\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.476958 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-user-idp-0-file-data\") pod \"c459aabe-55b9-415c-8782-8a112e9ea466\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.476978 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-cliconfig\") pod \"c459aabe-55b9-415c-8782-8a112e9ea466\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.477015 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-session\") pod \"c459aabe-55b9-415c-8782-8a112e9ea466\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.477050 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-user-template-provider-selection\") pod \"c459aabe-55b9-415c-8782-8a112e9ea466\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.477083 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c459aabe-55b9-415c-8782-8a112e9ea466-audit-dir\") pod \"c459aabe-55b9-415c-8782-8a112e9ea466\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.477103 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2z8pw\" (UniqueName: \"kubernetes.io/projected/c459aabe-55b9-415c-8782-8a112e9ea466-kube-api-access-2z8pw\") pod \"c459aabe-55b9-415c-8782-8a112e9ea466\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.477122 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-trusted-ca-bundle\") pod \"c459aabe-55b9-415c-8782-8a112e9ea466\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.477170 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c459aabe-55b9-415c-8782-8a112e9ea466-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "c459aabe-55b9-415c-8782-8a112e9ea466" (UID: "c459aabe-55b9-415c-8782-8a112e9ea466"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.477443 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-service-ca\") pod \"c459aabe-55b9-415c-8782-8a112e9ea466\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.477506 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-user-template-login\") pod \"c459aabe-55b9-415c-8782-8a112e9ea466\" (UID: \"c459aabe-55b9-415c-8782-8a112e9ea466\") " Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.478055 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "c459aabe-55b9-415c-8782-8a112e9ea466" (UID: "c459aabe-55b9-415c-8782-8a112e9ea466"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.478181 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "c459aabe-55b9-415c-8782-8a112e9ea466" (UID: "c459aabe-55b9-415c-8782-8a112e9ea466"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.478229 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c459aabe-55b9-415c-8782-8a112e9ea466-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "c459aabe-55b9-415c-8782-8a112e9ea466" (UID: "c459aabe-55b9-415c-8782-8a112e9ea466"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.480813 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "c459aabe-55b9-415c-8782-8a112e9ea466" (UID: "c459aabe-55b9-415c-8782-8a112e9ea466"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.482757 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "c459aabe-55b9-415c-8782-8a112e9ea466" (UID: "c459aabe-55b9-415c-8782-8a112e9ea466"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.483363 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c459aabe-55b9-415c-8782-8a112e9ea466-kube-api-access-2z8pw" (OuterVolumeSpecName: "kube-api-access-2z8pw") pod "c459aabe-55b9-415c-8782-8a112e9ea466" (UID: "c459aabe-55b9-415c-8782-8a112e9ea466"). InnerVolumeSpecName "kube-api-access-2z8pw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.489682 4899 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.489745 4899 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c459aabe-55b9-415c-8782-8a112e9ea466-audit-dir\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.489781 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2z8pw\" (UniqueName: \"kubernetes.io/projected/c459aabe-55b9-415c-8782-8a112e9ea466-kube-api-access-2z8pw\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.489819 4899 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.489847 4899 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.489879 4899 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.489932 4899 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c459aabe-55b9-415c-8782-8a112e9ea466-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.491344 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "c459aabe-55b9-415c-8782-8a112e9ea466" (UID: "c459aabe-55b9-415c-8782-8a112e9ea466"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.492140 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "c459aabe-55b9-415c-8782-8a112e9ea466" (UID: "c459aabe-55b9-415c-8782-8a112e9ea466"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.492578 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "c459aabe-55b9-415c-8782-8a112e9ea466" (UID: "c459aabe-55b9-415c-8782-8a112e9ea466"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.493468 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "c459aabe-55b9-415c-8782-8a112e9ea466" (UID: "c459aabe-55b9-415c-8782-8a112e9ea466"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.494436 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "c459aabe-55b9-415c-8782-8a112e9ea466" (UID: "c459aabe-55b9-415c-8782-8a112e9ea466"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.495183 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "c459aabe-55b9-415c-8782-8a112e9ea466" (UID: "c459aabe-55b9-415c-8782-8a112e9ea466"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.495400 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "c459aabe-55b9-415c-8782-8a112e9ea466" (UID: "c459aabe-55b9-415c-8782-8a112e9ea466"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.590779 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-audit-dir\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.590837 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-user-template-error\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.590861 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jq2hg\" (UniqueName: \"kubernetes.io/projected/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-kube-api-access-jq2hg\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.590885 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.590922 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-system-router-certs\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.590938 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-user-template-login\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.590956 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.591178 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-system-service-ca\") pod 
\"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.592103 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.592131 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-system-session\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.592176 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.592241 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-audit-policies\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.592278 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.592303 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.592364 4899 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.592376 4899 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:32 
crc kubenswrapper[4899]: I1003 08:44:32.592406 4899 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.592416 4899 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.592427 4899 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.592436 4899 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.592446 4899 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c459aabe-55b9-415c-8782-8a112e9ea466-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.693268 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-audit-dir\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.693326 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-user-template-error\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.693347 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jq2hg\" (UniqueName: \"kubernetes.io/projected/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-kube-api-access-jq2hg\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.693367 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.693387 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-system-router-certs\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: 
\"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.693404 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-user-template-login\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.693437 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.693457 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-system-service-ca\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.693477 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.693496 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-system-session\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.693481 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-audit-dir\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.693516 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.693735 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-audit-policies\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " 
pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.693788 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.693850 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.694807 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-system-service-ca\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.695402 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-audit-policies\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.696668 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.696747 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.699125 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.699158 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-system-session\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " 
pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.699349 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-system-router-certs\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.699491 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.699486 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.699606 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.703341 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-user-template-error\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.709810 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-v4-0-config-user-template-login\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.713344 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jq2hg\" (UniqueName: \"kubernetes.io/projected/b9d5da8f-4c9b-4f31-abb1-993dce2b12fa-kube-api-access-jq2hg\") pod \"oauth-openshift-7cf78455b6-vw4xq\" (UID: \"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa\") " pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.716800 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-tkpkx"] Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.719857 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-tkpkx"] Oct 03 08:44:32 crc kubenswrapper[4899]: I1003 08:44:32.742399 4899 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:33 crc kubenswrapper[4899]: I1003 08:44:33.126184 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7cf78455b6-vw4xq"] Oct 03 08:44:33 crc kubenswrapper[4899]: I1003 08:44:33.399654 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" event={"ID":"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa","Type":"ContainerStarted","Data":"0805cbc9ef6d428fb9d5ed4b05d695233df596f3d99c758ecd4daec592adde9f"} Oct 03 08:44:34 crc kubenswrapper[4899]: I1003 08:44:34.407855 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" event={"ID":"b9d5da8f-4c9b-4f31-abb1-993dce2b12fa","Type":"ContainerStarted","Data":"a62b46fcfa2d042ad70f5593c0ae0d6c683bf1365ea1d784c910f12c82fb505f"} Oct 03 08:44:34 crc kubenswrapper[4899]: I1003 08:44:34.408156 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:34 crc kubenswrapper[4899]: I1003 08:44:34.412720 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" Oct 03 08:44:34 crc kubenswrapper[4899]: I1003 08:44:34.430655 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-7cf78455b6-vw4xq" podStartSLOduration=28.43063162 podStartE2EDuration="28.43063162s" podCreationTimestamp="2025-10-03 08:44:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:44:34.424753654 +0000 UTC m=+248.532238627" watchObservedRunningTime="2025-10-03 08:44:34.43063162 +0000 UTC m=+248.538116573" Oct 03 08:44:34 crc kubenswrapper[4899]: I1003 08:44:34.534171 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c459aabe-55b9-415c-8782-8a112e9ea466" path="/var/lib/kubelet/pods/c459aabe-55b9-415c-8782-8a112e9ea466/volumes" Oct 03 08:44:44 crc kubenswrapper[4899]: I1003 08:44:44.540596 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5xplz"] Oct 03 08:44:44 crc kubenswrapper[4899]: I1003 08:44:44.542482 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jtp4v"] Oct 03 08:44:44 crc kubenswrapper[4899]: I1003 08:44:44.542861 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jtp4v" podUID="49764d57-9b3d-4097-a2f4-08a363c6a25f" containerName="registry-server" containerID="cri-o://f9f7c681a07a7b83ef38983939eed4f7e6d396db27d02184a12f450f6632573c" gracePeriod=30 Oct 03 08:44:44 crc kubenswrapper[4899]: I1003 08:44:44.543151 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5xplz" podUID="85fb2d07-4784-45b8-952e-2b12d61ea024" containerName="registry-server" containerID="cri-o://9cc116110d155f6971cc17c1a6d3931c2e27ff8547636e4325df1f2bc5b6af39" gracePeriod=30 Oct 03 08:44:44 crc kubenswrapper[4899]: I1003 08:44:44.547282 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-np5qp"] Oct 03 08:44:44 crc kubenswrapper[4899]: I1003 
08:44:44.547816 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-np5qp" podUID="a4456e30-3eb3-4e1a-b22d-8888babf06a9" containerName="marketplace-operator" containerID="cri-o://05db358dffce67ac8f6ccc58b2d2b68e7d4865d099c2c4c98067f30b1c1d8593" gracePeriod=30 Oct 03 08:44:44 crc kubenswrapper[4899]: I1003 08:44:44.571002 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4lpnw"] Oct 03 08:44:44 crc kubenswrapper[4899]: I1003 08:44:44.571352 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4lpnw" podUID="ad6ce9b2-60bf-492e-b583-9147a171e7bd" containerName="registry-server" containerID="cri-o://3ac769f0fe0af8c0ab61b0681817208913f41f014c16bd1e93edaa2458a1118a" gracePeriod=30 Oct 03 08:44:44 crc kubenswrapper[4899]: I1003 08:44:44.586181 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-stjfx"] Oct 03 08:44:44 crc kubenswrapper[4899]: I1003 08:44:44.586835 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-stjfx" Oct 03 08:44:44 crc kubenswrapper[4899]: I1003 08:44:44.600723 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b99vh"] Oct 03 08:44:44 crc kubenswrapper[4899]: I1003 08:44:44.601089 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-b99vh" podUID="4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f" containerName="registry-server" containerID="cri-o://7cd91724e17915e17f3f40b9b7fc0c33fbf64850da730fd8ed9ece3263eead8c" gracePeriod=30 Oct 03 08:44:44 crc kubenswrapper[4899]: I1003 08:44:44.616520 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-stjfx"] Oct 03 08:44:44 crc kubenswrapper[4899]: I1003 08:44:44.746671 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clddn\" (UniqueName: \"kubernetes.io/projected/2a4e1059-563b-443d-a7ce-d8af764e8900-kube-api-access-clddn\") pod \"marketplace-operator-79b997595-stjfx\" (UID: \"2a4e1059-563b-443d-a7ce-d8af764e8900\") " pod="openshift-marketplace/marketplace-operator-79b997595-stjfx" Oct 03 08:44:44 crc kubenswrapper[4899]: I1003 08:44:44.746716 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2a4e1059-563b-443d-a7ce-d8af764e8900-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-stjfx\" (UID: \"2a4e1059-563b-443d-a7ce-d8af764e8900\") " pod="openshift-marketplace/marketplace-operator-79b997595-stjfx" Oct 03 08:44:44 crc kubenswrapper[4899]: I1003 08:44:44.746787 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2a4e1059-563b-443d-a7ce-d8af764e8900-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-stjfx\" (UID: \"2a4e1059-563b-443d-a7ce-d8af764e8900\") " pod="openshift-marketplace/marketplace-operator-79b997595-stjfx" Oct 03 08:44:44 crc kubenswrapper[4899]: I1003 08:44:44.848568 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/2a4e1059-563b-443d-a7ce-d8af764e8900-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-stjfx\" (UID: \"2a4e1059-563b-443d-a7ce-d8af764e8900\") " pod="openshift-marketplace/marketplace-operator-79b997595-stjfx" Oct 03 08:44:44 crc kubenswrapper[4899]: I1003 08:44:44.848851 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clddn\" (UniqueName: \"kubernetes.io/projected/2a4e1059-563b-443d-a7ce-d8af764e8900-kube-api-access-clddn\") pod \"marketplace-operator-79b997595-stjfx\" (UID: \"2a4e1059-563b-443d-a7ce-d8af764e8900\") " pod="openshift-marketplace/marketplace-operator-79b997595-stjfx" Oct 03 08:44:44 crc kubenswrapper[4899]: I1003 08:44:44.848909 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2a4e1059-563b-443d-a7ce-d8af764e8900-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-stjfx\" (UID: \"2a4e1059-563b-443d-a7ce-d8af764e8900\") " pod="openshift-marketplace/marketplace-operator-79b997595-stjfx" Oct 03 08:44:44 crc kubenswrapper[4899]: I1003 08:44:44.851200 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2a4e1059-563b-443d-a7ce-d8af764e8900-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-stjfx\" (UID: \"2a4e1059-563b-443d-a7ce-d8af764e8900\") " pod="openshift-marketplace/marketplace-operator-79b997595-stjfx" Oct 03 08:44:44 crc kubenswrapper[4899]: I1003 08:44:44.868286 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2a4e1059-563b-443d-a7ce-d8af764e8900-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-stjfx\" (UID: \"2a4e1059-563b-443d-a7ce-d8af764e8900\") " pod="openshift-marketplace/marketplace-operator-79b997595-stjfx" Oct 03 08:44:44 crc kubenswrapper[4899]: I1003 08:44:44.872452 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clddn\" (UniqueName: \"kubernetes.io/projected/2a4e1059-563b-443d-a7ce-d8af764e8900-kube-api-access-clddn\") pod \"marketplace-operator-79b997595-stjfx\" (UID: \"2a4e1059-563b-443d-a7ce-d8af764e8900\") " pod="openshift-marketplace/marketplace-operator-79b997595-stjfx" Oct 03 08:44:44 crc kubenswrapper[4899]: I1003 08:44:44.989276 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-stjfx" Oct 03 08:44:44 crc kubenswrapper[4899]: I1003 08:44:44.999479 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-np5qp" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.007238 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4lpnw" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.023659 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jtp4v" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.044201 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5xplz" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.049597 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-b99vh" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.152724 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85fb2d07-4784-45b8-952e-2b12d61ea024-utilities\") pod \"85fb2d07-4784-45b8-952e-2b12d61ea024\" (UID: \"85fb2d07-4784-45b8-952e-2b12d61ea024\") " Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.152766 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad6ce9b2-60bf-492e-b583-9147a171e7bd-utilities\") pod \"ad6ce9b2-60bf-492e-b583-9147a171e7bd\" (UID: \"ad6ce9b2-60bf-492e-b583-9147a171e7bd\") " Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.152792 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a4456e30-3eb3-4e1a-b22d-8888babf06a9-marketplace-operator-metrics\") pod \"a4456e30-3eb3-4e1a-b22d-8888babf06a9\" (UID: \"a4456e30-3eb3-4e1a-b22d-8888babf06a9\") " Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.152818 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kkn7n\" (UniqueName: \"kubernetes.io/projected/4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f-kube-api-access-kkn7n\") pod \"4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f\" (UID: \"4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f\") " Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.152878 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49764d57-9b3d-4097-a2f4-08a363c6a25f-utilities\") pod \"49764d57-9b3d-4097-a2f4-08a363c6a25f\" (UID: \"49764d57-9b3d-4097-a2f4-08a363c6a25f\") " Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.152921 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f-utilities\") pod \"4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f\" (UID: \"4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f\") " Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.152955 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85fb2d07-4784-45b8-952e-2b12d61ea024-catalog-content\") pod \"85fb2d07-4784-45b8-952e-2b12d61ea024\" (UID: \"85fb2d07-4784-45b8-952e-2b12d61ea024\") " Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.152974 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7p6pj\" (UniqueName: \"kubernetes.io/projected/ad6ce9b2-60bf-492e-b583-9147a171e7bd-kube-api-access-7p6pj\") pod \"ad6ce9b2-60bf-492e-b583-9147a171e7bd\" (UID: \"ad6ce9b2-60bf-492e-b583-9147a171e7bd\") " Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.152994 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f-catalog-content\") pod \"4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f\" (UID: \"4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f\") " Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.153016 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-775ml\" (UniqueName: \"kubernetes.io/projected/49764d57-9b3d-4097-a2f4-08a363c6a25f-kube-api-access-775ml\") 
pod \"49764d57-9b3d-4097-a2f4-08a363c6a25f\" (UID: \"49764d57-9b3d-4097-a2f4-08a363c6a25f\") " Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.153040 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lsxj6\" (UniqueName: \"kubernetes.io/projected/a4456e30-3eb3-4e1a-b22d-8888babf06a9-kube-api-access-lsxj6\") pod \"a4456e30-3eb3-4e1a-b22d-8888babf06a9\" (UID: \"a4456e30-3eb3-4e1a-b22d-8888babf06a9\") " Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.153064 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pr49d\" (UniqueName: \"kubernetes.io/projected/85fb2d07-4784-45b8-952e-2b12d61ea024-kube-api-access-pr49d\") pod \"85fb2d07-4784-45b8-952e-2b12d61ea024\" (UID: \"85fb2d07-4784-45b8-952e-2b12d61ea024\") " Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.153083 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad6ce9b2-60bf-492e-b583-9147a171e7bd-catalog-content\") pod \"ad6ce9b2-60bf-492e-b583-9147a171e7bd\" (UID: \"ad6ce9b2-60bf-492e-b583-9147a171e7bd\") " Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.153108 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a4456e30-3eb3-4e1a-b22d-8888babf06a9-marketplace-trusted-ca\") pod \"a4456e30-3eb3-4e1a-b22d-8888babf06a9\" (UID: \"a4456e30-3eb3-4e1a-b22d-8888babf06a9\") " Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.153126 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49764d57-9b3d-4097-a2f4-08a363c6a25f-catalog-content\") pod \"49764d57-9b3d-4097-a2f4-08a363c6a25f\" (UID: \"49764d57-9b3d-4097-a2f4-08a363c6a25f\") " Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.154050 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/49764d57-9b3d-4097-a2f4-08a363c6a25f-utilities" (OuterVolumeSpecName: "utilities") pod "49764d57-9b3d-4097-a2f4-08a363c6a25f" (UID: "49764d57-9b3d-4097-a2f4-08a363c6a25f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.154153 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f-utilities" (OuterVolumeSpecName: "utilities") pod "4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f" (UID: "4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.154756 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85fb2d07-4784-45b8-952e-2b12d61ea024-utilities" (OuterVolumeSpecName: "utilities") pod "85fb2d07-4784-45b8-952e-2b12d61ea024" (UID: "85fb2d07-4784-45b8-952e-2b12d61ea024"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.154871 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad6ce9b2-60bf-492e-b583-9147a171e7bd-utilities" (OuterVolumeSpecName: "utilities") pod "ad6ce9b2-60bf-492e-b583-9147a171e7bd" (UID: "ad6ce9b2-60bf-492e-b583-9147a171e7bd"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.157628 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4456e30-3eb3-4e1a-b22d-8888babf06a9-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "a4456e30-3eb3-4e1a-b22d-8888babf06a9" (UID: "a4456e30-3eb3-4e1a-b22d-8888babf06a9"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.158397 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4456e30-3eb3-4e1a-b22d-8888babf06a9-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "a4456e30-3eb3-4e1a-b22d-8888babf06a9" (UID: "a4456e30-3eb3-4e1a-b22d-8888babf06a9"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.159542 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85fb2d07-4784-45b8-952e-2b12d61ea024-kube-api-access-pr49d" (OuterVolumeSpecName: "kube-api-access-pr49d") pod "85fb2d07-4784-45b8-952e-2b12d61ea024" (UID: "85fb2d07-4784-45b8-952e-2b12d61ea024"). InnerVolumeSpecName "kube-api-access-pr49d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.161011 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f-kube-api-access-kkn7n" (OuterVolumeSpecName: "kube-api-access-kkn7n") pod "4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f" (UID: "4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f"). InnerVolumeSpecName "kube-api-access-kkn7n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.161141 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49764d57-9b3d-4097-a2f4-08a363c6a25f-kube-api-access-775ml" (OuterVolumeSpecName: "kube-api-access-775ml") pod "49764d57-9b3d-4097-a2f4-08a363c6a25f" (UID: "49764d57-9b3d-4097-a2f4-08a363c6a25f"). InnerVolumeSpecName "kube-api-access-775ml". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.162873 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad6ce9b2-60bf-492e-b583-9147a171e7bd-kube-api-access-7p6pj" (OuterVolumeSpecName: "kube-api-access-7p6pj") pod "ad6ce9b2-60bf-492e-b583-9147a171e7bd" (UID: "ad6ce9b2-60bf-492e-b583-9147a171e7bd"). InnerVolumeSpecName "kube-api-access-7p6pj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.163672 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4456e30-3eb3-4e1a-b22d-8888babf06a9-kube-api-access-lsxj6" (OuterVolumeSpecName: "kube-api-access-lsxj6") pod "a4456e30-3eb3-4e1a-b22d-8888babf06a9" (UID: "a4456e30-3eb3-4e1a-b22d-8888babf06a9"). InnerVolumeSpecName "kube-api-access-lsxj6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.186071 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad6ce9b2-60bf-492e-b583-9147a171e7bd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ad6ce9b2-60bf-492e-b583-9147a171e7bd" (UID: "ad6ce9b2-60bf-492e-b583-9147a171e7bd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.215515 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-stjfx"] Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.235980 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85fb2d07-4784-45b8-952e-2b12d61ea024-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "85fb2d07-4784-45b8-952e-2b12d61ea024" (UID: "85fb2d07-4784-45b8-952e-2b12d61ea024"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.247810 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/49764d57-9b3d-4097-a2f4-08a363c6a25f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "49764d57-9b3d-4097-a2f4-08a363c6a25f" (UID: "49764d57-9b3d-4097-a2f4-08a363c6a25f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.254153 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49764d57-9b3d-4097-a2f4-08a363c6a25f-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.254185 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.254194 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85fb2d07-4784-45b8-952e-2b12d61ea024-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.254204 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7p6pj\" (UniqueName: \"kubernetes.io/projected/ad6ce9b2-60bf-492e-b583-9147a171e7bd-kube-api-access-7p6pj\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.254214 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-775ml\" (UniqueName: \"kubernetes.io/projected/49764d57-9b3d-4097-a2f4-08a363c6a25f-kube-api-access-775ml\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.254222 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lsxj6\" (UniqueName: \"kubernetes.io/projected/a4456e30-3eb3-4e1a-b22d-8888babf06a9-kube-api-access-lsxj6\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.254248 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pr49d\" (UniqueName: \"kubernetes.io/projected/85fb2d07-4784-45b8-952e-2b12d61ea024-kube-api-access-pr49d\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.254257 
4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad6ce9b2-60bf-492e-b583-9147a171e7bd-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.254266 4899 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a4456e30-3eb3-4e1a-b22d-8888babf06a9-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.254274 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49764d57-9b3d-4097-a2f4-08a363c6a25f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.254281 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85fb2d07-4784-45b8-952e-2b12d61ea024-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.254289 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad6ce9b2-60bf-492e-b583-9147a171e7bd-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.254297 4899 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a4456e30-3eb3-4e1a-b22d-8888babf06a9-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.254307 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kkn7n\" (UniqueName: \"kubernetes.io/projected/4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f-kube-api-access-kkn7n\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.301329 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f" (UID: "4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.355067 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.464321 4899 generic.go:334] "Generic (PLEG): container finished" podID="4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f" containerID="7cd91724e17915e17f3f40b9b7fc0c33fbf64850da730fd8ed9ece3263eead8c" exitCode=0 Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.464417 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b99vh" event={"ID":"4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f","Type":"ContainerDied","Data":"7cd91724e17915e17f3f40b9b7fc0c33fbf64850da730fd8ed9ece3263eead8c"} Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.464490 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b99vh" event={"ID":"4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f","Type":"ContainerDied","Data":"3f94b371a270af50e326d00cc32e3d58101a2533ba2b25a6090608d534b7623b"} Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.464512 4899 scope.go:117] "RemoveContainer" containerID="7cd91724e17915e17f3f40b9b7fc0c33fbf64850da730fd8ed9ece3263eead8c" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.464709 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b99vh" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.466422 4899 generic.go:334] "Generic (PLEG): container finished" podID="ad6ce9b2-60bf-492e-b583-9147a171e7bd" containerID="3ac769f0fe0af8c0ab61b0681817208913f41f014c16bd1e93edaa2458a1118a" exitCode=0 Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.466483 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4lpnw" event={"ID":"ad6ce9b2-60bf-492e-b583-9147a171e7bd","Type":"ContainerDied","Data":"3ac769f0fe0af8c0ab61b0681817208913f41f014c16bd1e93edaa2458a1118a"} Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.466514 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4lpnw" event={"ID":"ad6ce9b2-60bf-492e-b583-9147a171e7bd","Type":"ContainerDied","Data":"779d62d47eecb63473b8ce6e5662ae05b55d05674203669837a5705c3cf08ed0"} Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.466539 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4lpnw" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.468311 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-stjfx" event={"ID":"2a4e1059-563b-443d-a7ce-d8af764e8900","Type":"ContainerStarted","Data":"d93274556385fcc9f7c8d23a9d31983f0af2973762cb48df7b58c65e8ebac5cc"} Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.468347 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-stjfx" event={"ID":"2a4e1059-563b-443d-a7ce-d8af764e8900","Type":"ContainerStarted","Data":"1fc5a4268ecf900e6d4407fef38d7346c98c34d56a73c80a3111ca118152a21c"} Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.469490 4899 generic.go:334] "Generic (PLEG): container finished" podID="a4456e30-3eb3-4e1a-b22d-8888babf06a9" containerID="05db358dffce67ac8f6ccc58b2d2b68e7d4865d099c2c4c98067f30b1c1d8593" exitCode=0 Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.469550 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-np5qp" event={"ID":"a4456e30-3eb3-4e1a-b22d-8888babf06a9","Type":"ContainerDied","Data":"05db358dffce67ac8f6ccc58b2d2b68e7d4865d099c2c4c98067f30b1c1d8593"} Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.469588 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-np5qp" event={"ID":"a4456e30-3eb3-4e1a-b22d-8888babf06a9","Type":"ContainerDied","Data":"414a450044bc41863042b259ece734bb8af36b89ce37c51e08f292d8dee2290c"} Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.469645 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-np5qp" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.479257 4899 scope.go:117] "RemoveContainer" containerID="a51276e5016408f8784e77930f96f4e5211256a4bd5a4e6caf35dacbb52df685" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.480476 4899 generic.go:334] "Generic (PLEG): container finished" podID="85fb2d07-4784-45b8-952e-2b12d61ea024" containerID="9cc116110d155f6971cc17c1a6d3931c2e27ff8547636e4325df1f2bc5b6af39" exitCode=0 Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.480550 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5xplz" event={"ID":"85fb2d07-4784-45b8-952e-2b12d61ea024","Type":"ContainerDied","Data":"9cc116110d155f6971cc17c1a6d3931c2e27ff8547636e4325df1f2bc5b6af39"} Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.480557 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5xplz" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.480579 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5xplz" event={"ID":"85fb2d07-4784-45b8-952e-2b12d61ea024","Type":"ContainerDied","Data":"993b67686c8e101800c62af21ea4829014eb07b6f974ea9fba3537ab24ff55cd"} Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.488182 4899 generic.go:334] "Generic (PLEG): container finished" podID="49764d57-9b3d-4097-a2f4-08a363c6a25f" containerID="f9f7c681a07a7b83ef38983939eed4f7e6d396db27d02184a12f450f6632573c" exitCode=0 Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.488221 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jtp4v" event={"ID":"49764d57-9b3d-4097-a2f4-08a363c6a25f","Type":"ContainerDied","Data":"f9f7c681a07a7b83ef38983939eed4f7e6d396db27d02184a12f450f6632573c"} Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.488248 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jtp4v" event={"ID":"49764d57-9b3d-4097-a2f4-08a363c6a25f","Type":"ContainerDied","Data":"acace7f15f38368b237a8025c6ea0772fe22591fd4d5c205ecc4ef1cd771bc42"} Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.488317 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jtp4v" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.508842 4899 scope.go:117] "RemoveContainer" containerID="03a6fb695d6b4e00a8ae647b0e29105c8fd46566df721ee3d2bbf9489fc34460" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.519707 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-stjfx" podStartSLOduration=1.519645629 podStartE2EDuration="1.519645629s" podCreationTimestamp="2025-10-03 08:44:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:44:45.507974959 +0000 UTC m=+259.615459922" watchObservedRunningTime="2025-10-03 08:44:45.519645629 +0000 UTC m=+259.627130592" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.524863 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5xplz"] Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.528552 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5xplz"] Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.534241 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4lpnw"] Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.538508 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4lpnw"] Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.547076 4899 scope.go:117] "RemoveContainer" containerID="7cd91724e17915e17f3f40b9b7fc0c33fbf64850da730fd8ed9ece3263eead8c" Oct 03 08:44:45 crc kubenswrapper[4899]: E1003 08:44:45.548559 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7cd91724e17915e17f3f40b9b7fc0c33fbf64850da730fd8ed9ece3263eead8c\": container with ID starting with 7cd91724e17915e17f3f40b9b7fc0c33fbf64850da730fd8ed9ece3263eead8c not found: ID does not exist" 
containerID="7cd91724e17915e17f3f40b9b7fc0c33fbf64850da730fd8ed9ece3263eead8c" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.548694 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7cd91724e17915e17f3f40b9b7fc0c33fbf64850da730fd8ed9ece3263eead8c"} err="failed to get container status \"7cd91724e17915e17f3f40b9b7fc0c33fbf64850da730fd8ed9ece3263eead8c\": rpc error: code = NotFound desc = could not find container \"7cd91724e17915e17f3f40b9b7fc0c33fbf64850da730fd8ed9ece3263eead8c\": container with ID starting with 7cd91724e17915e17f3f40b9b7fc0c33fbf64850da730fd8ed9ece3263eead8c not found: ID does not exist" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.548782 4899 scope.go:117] "RemoveContainer" containerID="a51276e5016408f8784e77930f96f4e5211256a4bd5a4e6caf35dacbb52df685" Oct 03 08:44:45 crc kubenswrapper[4899]: E1003 08:44:45.550418 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a51276e5016408f8784e77930f96f4e5211256a4bd5a4e6caf35dacbb52df685\": container with ID starting with a51276e5016408f8784e77930f96f4e5211256a4bd5a4e6caf35dacbb52df685 not found: ID does not exist" containerID="a51276e5016408f8784e77930f96f4e5211256a4bd5a4e6caf35dacbb52df685" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.550445 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a51276e5016408f8784e77930f96f4e5211256a4bd5a4e6caf35dacbb52df685"} err="failed to get container status \"a51276e5016408f8784e77930f96f4e5211256a4bd5a4e6caf35dacbb52df685\": rpc error: code = NotFound desc = could not find container \"a51276e5016408f8784e77930f96f4e5211256a4bd5a4e6caf35dacbb52df685\": container with ID starting with a51276e5016408f8784e77930f96f4e5211256a4bd5a4e6caf35dacbb52df685 not found: ID does not exist" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.550460 4899 scope.go:117] "RemoveContainer" containerID="03a6fb695d6b4e00a8ae647b0e29105c8fd46566df721ee3d2bbf9489fc34460" Oct 03 08:44:45 crc kubenswrapper[4899]: E1003 08:44:45.551002 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03a6fb695d6b4e00a8ae647b0e29105c8fd46566df721ee3d2bbf9489fc34460\": container with ID starting with 03a6fb695d6b4e00a8ae647b0e29105c8fd46566df721ee3d2bbf9489fc34460 not found: ID does not exist" containerID="03a6fb695d6b4e00a8ae647b0e29105c8fd46566df721ee3d2bbf9489fc34460" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.551022 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03a6fb695d6b4e00a8ae647b0e29105c8fd46566df721ee3d2bbf9489fc34460"} err="failed to get container status \"03a6fb695d6b4e00a8ae647b0e29105c8fd46566df721ee3d2bbf9489fc34460\": rpc error: code = NotFound desc = could not find container \"03a6fb695d6b4e00a8ae647b0e29105c8fd46566df721ee3d2bbf9489fc34460\": container with ID starting with 03a6fb695d6b4e00a8ae647b0e29105c8fd46566df721ee3d2bbf9489fc34460 not found: ID does not exist" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.551037 4899 scope.go:117] "RemoveContainer" containerID="3ac769f0fe0af8c0ab61b0681817208913f41f014c16bd1e93edaa2458a1118a" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.553134 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-np5qp"] Oct 03 08:44:45 crc kubenswrapper[4899]: 
I1003 08:44:45.556577 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-np5qp"] Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.564738 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b99vh"] Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.574295 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-b99vh"] Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.574700 4899 scope.go:117] "RemoveContainer" containerID="07e37717892ce6ad14d5a5bd5ff091e5d5d421efa40e73f1a976094aeae33c3a" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.580206 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jtp4v"] Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.583200 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-jtp4v"] Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.589620 4899 scope.go:117] "RemoveContainer" containerID="34307bec13d1158bfbe16358576caf750ba815e79710241defc4a5a04b4bd6a4" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.605763 4899 scope.go:117] "RemoveContainer" containerID="3ac769f0fe0af8c0ab61b0681817208913f41f014c16bd1e93edaa2458a1118a" Oct 03 08:44:45 crc kubenswrapper[4899]: E1003 08:44:45.606340 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ac769f0fe0af8c0ab61b0681817208913f41f014c16bd1e93edaa2458a1118a\": container with ID starting with 3ac769f0fe0af8c0ab61b0681817208913f41f014c16bd1e93edaa2458a1118a not found: ID does not exist" containerID="3ac769f0fe0af8c0ab61b0681817208913f41f014c16bd1e93edaa2458a1118a" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.606386 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ac769f0fe0af8c0ab61b0681817208913f41f014c16bd1e93edaa2458a1118a"} err="failed to get container status \"3ac769f0fe0af8c0ab61b0681817208913f41f014c16bd1e93edaa2458a1118a\": rpc error: code = NotFound desc = could not find container \"3ac769f0fe0af8c0ab61b0681817208913f41f014c16bd1e93edaa2458a1118a\": container with ID starting with 3ac769f0fe0af8c0ab61b0681817208913f41f014c16bd1e93edaa2458a1118a not found: ID does not exist" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.606420 4899 scope.go:117] "RemoveContainer" containerID="07e37717892ce6ad14d5a5bd5ff091e5d5d421efa40e73f1a976094aeae33c3a" Oct 03 08:44:45 crc kubenswrapper[4899]: E1003 08:44:45.606828 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07e37717892ce6ad14d5a5bd5ff091e5d5d421efa40e73f1a976094aeae33c3a\": container with ID starting with 07e37717892ce6ad14d5a5bd5ff091e5d5d421efa40e73f1a976094aeae33c3a not found: ID does not exist" containerID="07e37717892ce6ad14d5a5bd5ff091e5d5d421efa40e73f1a976094aeae33c3a" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.606858 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07e37717892ce6ad14d5a5bd5ff091e5d5d421efa40e73f1a976094aeae33c3a"} err="failed to get container status \"07e37717892ce6ad14d5a5bd5ff091e5d5d421efa40e73f1a976094aeae33c3a\": rpc error: code = NotFound desc = could not find container \"07e37717892ce6ad14d5a5bd5ff091e5d5d421efa40e73f1a976094aeae33c3a\": container with ID 
starting with 07e37717892ce6ad14d5a5bd5ff091e5d5d421efa40e73f1a976094aeae33c3a not found: ID does not exist" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.606875 4899 scope.go:117] "RemoveContainer" containerID="34307bec13d1158bfbe16358576caf750ba815e79710241defc4a5a04b4bd6a4" Oct 03 08:44:45 crc kubenswrapper[4899]: E1003 08:44:45.607450 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34307bec13d1158bfbe16358576caf750ba815e79710241defc4a5a04b4bd6a4\": container with ID starting with 34307bec13d1158bfbe16358576caf750ba815e79710241defc4a5a04b4bd6a4 not found: ID does not exist" containerID="34307bec13d1158bfbe16358576caf750ba815e79710241defc4a5a04b4bd6a4" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.607496 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34307bec13d1158bfbe16358576caf750ba815e79710241defc4a5a04b4bd6a4"} err="failed to get container status \"34307bec13d1158bfbe16358576caf750ba815e79710241defc4a5a04b4bd6a4\": rpc error: code = NotFound desc = could not find container \"34307bec13d1158bfbe16358576caf750ba815e79710241defc4a5a04b4bd6a4\": container with ID starting with 34307bec13d1158bfbe16358576caf750ba815e79710241defc4a5a04b4bd6a4 not found: ID does not exist" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.607527 4899 scope.go:117] "RemoveContainer" containerID="05db358dffce67ac8f6ccc58b2d2b68e7d4865d099c2c4c98067f30b1c1d8593" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.624323 4899 scope.go:117] "RemoveContainer" containerID="05db358dffce67ac8f6ccc58b2d2b68e7d4865d099c2c4c98067f30b1c1d8593" Oct 03 08:44:45 crc kubenswrapper[4899]: E1003 08:44:45.624865 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05db358dffce67ac8f6ccc58b2d2b68e7d4865d099c2c4c98067f30b1c1d8593\": container with ID starting with 05db358dffce67ac8f6ccc58b2d2b68e7d4865d099c2c4c98067f30b1c1d8593 not found: ID does not exist" containerID="05db358dffce67ac8f6ccc58b2d2b68e7d4865d099c2c4c98067f30b1c1d8593" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.624933 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05db358dffce67ac8f6ccc58b2d2b68e7d4865d099c2c4c98067f30b1c1d8593"} err="failed to get container status \"05db358dffce67ac8f6ccc58b2d2b68e7d4865d099c2c4c98067f30b1c1d8593\": rpc error: code = NotFound desc = could not find container \"05db358dffce67ac8f6ccc58b2d2b68e7d4865d099c2c4c98067f30b1c1d8593\": container with ID starting with 05db358dffce67ac8f6ccc58b2d2b68e7d4865d099c2c4c98067f30b1c1d8593 not found: ID does not exist" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.624966 4899 scope.go:117] "RemoveContainer" containerID="9cc116110d155f6971cc17c1a6d3931c2e27ff8547636e4325df1f2bc5b6af39" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.639182 4899 scope.go:117] "RemoveContainer" containerID="50edf9325ac4ab45aad67d24980095054f1ec58586099ef0f034c98e62008509" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.654449 4899 scope.go:117] "RemoveContainer" containerID="ffb9ed9642f199e0f72618a43fd861d41ffbd270bbb93ab04116fd28be218963" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.670986 4899 scope.go:117] "RemoveContainer" containerID="9cc116110d155f6971cc17c1a6d3931c2e27ff8547636e4325df1f2bc5b6af39" Oct 03 08:44:45 crc kubenswrapper[4899]: E1003 08:44:45.671534 4899 
log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9cc116110d155f6971cc17c1a6d3931c2e27ff8547636e4325df1f2bc5b6af39\": container with ID starting with 9cc116110d155f6971cc17c1a6d3931c2e27ff8547636e4325df1f2bc5b6af39 not found: ID does not exist" containerID="9cc116110d155f6971cc17c1a6d3931c2e27ff8547636e4325df1f2bc5b6af39" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.671731 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9cc116110d155f6971cc17c1a6d3931c2e27ff8547636e4325df1f2bc5b6af39"} err="failed to get container status \"9cc116110d155f6971cc17c1a6d3931c2e27ff8547636e4325df1f2bc5b6af39\": rpc error: code = NotFound desc = could not find container \"9cc116110d155f6971cc17c1a6d3931c2e27ff8547636e4325df1f2bc5b6af39\": container with ID starting with 9cc116110d155f6971cc17c1a6d3931c2e27ff8547636e4325df1f2bc5b6af39 not found: ID does not exist" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.671807 4899 scope.go:117] "RemoveContainer" containerID="50edf9325ac4ab45aad67d24980095054f1ec58586099ef0f034c98e62008509" Oct 03 08:44:45 crc kubenswrapper[4899]: E1003 08:44:45.672310 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50edf9325ac4ab45aad67d24980095054f1ec58586099ef0f034c98e62008509\": container with ID starting with 50edf9325ac4ab45aad67d24980095054f1ec58586099ef0f034c98e62008509 not found: ID does not exist" containerID="50edf9325ac4ab45aad67d24980095054f1ec58586099ef0f034c98e62008509" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.672348 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50edf9325ac4ab45aad67d24980095054f1ec58586099ef0f034c98e62008509"} err="failed to get container status \"50edf9325ac4ab45aad67d24980095054f1ec58586099ef0f034c98e62008509\": rpc error: code = NotFound desc = could not find container \"50edf9325ac4ab45aad67d24980095054f1ec58586099ef0f034c98e62008509\": container with ID starting with 50edf9325ac4ab45aad67d24980095054f1ec58586099ef0f034c98e62008509 not found: ID does not exist" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.672375 4899 scope.go:117] "RemoveContainer" containerID="ffb9ed9642f199e0f72618a43fd861d41ffbd270bbb93ab04116fd28be218963" Oct 03 08:44:45 crc kubenswrapper[4899]: E1003 08:44:45.672770 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ffb9ed9642f199e0f72618a43fd861d41ffbd270bbb93ab04116fd28be218963\": container with ID starting with ffb9ed9642f199e0f72618a43fd861d41ffbd270bbb93ab04116fd28be218963 not found: ID does not exist" containerID="ffb9ed9642f199e0f72618a43fd861d41ffbd270bbb93ab04116fd28be218963" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.672800 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffb9ed9642f199e0f72618a43fd861d41ffbd270bbb93ab04116fd28be218963"} err="failed to get container status \"ffb9ed9642f199e0f72618a43fd861d41ffbd270bbb93ab04116fd28be218963\": rpc error: code = NotFound desc = could not find container \"ffb9ed9642f199e0f72618a43fd861d41ffbd270bbb93ab04116fd28be218963\": container with ID starting with ffb9ed9642f199e0f72618a43fd861d41ffbd270bbb93ab04116fd28be218963 not found: ID does not exist" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.672820 4899 scope.go:117] 
"RemoveContainer" containerID="f9f7c681a07a7b83ef38983939eed4f7e6d396db27d02184a12f450f6632573c" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.687273 4899 scope.go:117] "RemoveContainer" containerID="a0a725cd4beb746a5d82769d169f4100312905d27cb852de145a8d54bb844377" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.737992 4899 scope.go:117] "RemoveContainer" containerID="da827364a5ac1fe0474ef66174ac298f0beac493eebbf6444c87ed093a2a53f0" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.750651 4899 scope.go:117] "RemoveContainer" containerID="f9f7c681a07a7b83ef38983939eed4f7e6d396db27d02184a12f450f6632573c" Oct 03 08:44:45 crc kubenswrapper[4899]: E1003 08:44:45.751445 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9f7c681a07a7b83ef38983939eed4f7e6d396db27d02184a12f450f6632573c\": container with ID starting with f9f7c681a07a7b83ef38983939eed4f7e6d396db27d02184a12f450f6632573c not found: ID does not exist" containerID="f9f7c681a07a7b83ef38983939eed4f7e6d396db27d02184a12f450f6632573c" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.751549 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9f7c681a07a7b83ef38983939eed4f7e6d396db27d02184a12f450f6632573c"} err="failed to get container status \"f9f7c681a07a7b83ef38983939eed4f7e6d396db27d02184a12f450f6632573c\": rpc error: code = NotFound desc = could not find container \"f9f7c681a07a7b83ef38983939eed4f7e6d396db27d02184a12f450f6632573c\": container with ID starting with f9f7c681a07a7b83ef38983939eed4f7e6d396db27d02184a12f450f6632573c not found: ID does not exist" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.751589 4899 scope.go:117] "RemoveContainer" containerID="a0a725cd4beb746a5d82769d169f4100312905d27cb852de145a8d54bb844377" Oct 03 08:44:45 crc kubenswrapper[4899]: E1003 08:44:45.751997 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0a725cd4beb746a5d82769d169f4100312905d27cb852de145a8d54bb844377\": container with ID starting with a0a725cd4beb746a5d82769d169f4100312905d27cb852de145a8d54bb844377 not found: ID does not exist" containerID="a0a725cd4beb746a5d82769d169f4100312905d27cb852de145a8d54bb844377" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.752045 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0a725cd4beb746a5d82769d169f4100312905d27cb852de145a8d54bb844377"} err="failed to get container status \"a0a725cd4beb746a5d82769d169f4100312905d27cb852de145a8d54bb844377\": rpc error: code = NotFound desc = could not find container \"a0a725cd4beb746a5d82769d169f4100312905d27cb852de145a8d54bb844377\": container with ID starting with a0a725cd4beb746a5d82769d169f4100312905d27cb852de145a8d54bb844377 not found: ID does not exist" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.752075 4899 scope.go:117] "RemoveContainer" containerID="da827364a5ac1fe0474ef66174ac298f0beac493eebbf6444c87ed093a2a53f0" Oct 03 08:44:45 crc kubenswrapper[4899]: E1003 08:44:45.752361 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da827364a5ac1fe0474ef66174ac298f0beac493eebbf6444c87ed093a2a53f0\": container with ID starting with da827364a5ac1fe0474ef66174ac298f0beac493eebbf6444c87ed093a2a53f0 not found: ID does not exist" 
containerID="da827364a5ac1fe0474ef66174ac298f0beac493eebbf6444c87ed093a2a53f0" Oct 03 08:44:45 crc kubenswrapper[4899]: I1003 08:44:45.752425 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da827364a5ac1fe0474ef66174ac298f0beac493eebbf6444c87ed093a2a53f0"} err="failed to get container status \"da827364a5ac1fe0474ef66174ac298f0beac493eebbf6444c87ed093a2a53f0\": rpc error: code = NotFound desc = could not find container \"da827364a5ac1fe0474ef66174ac298f0beac493eebbf6444c87ed093a2a53f0\": container with ID starting with da827364a5ac1fe0474ef66174ac298f0beac493eebbf6444c87ed093a2a53f0 not found: ID does not exist" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.499586 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-stjfx" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.503623 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-stjfx" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.548404 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49764d57-9b3d-4097-a2f4-08a363c6a25f" path="/var/lib/kubelet/pods/49764d57-9b3d-4097-a2f4-08a363c6a25f/volumes" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.549257 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f" path="/var/lib/kubelet/pods/4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f/volumes" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.550194 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85fb2d07-4784-45b8-952e-2b12d61ea024" path="/var/lib/kubelet/pods/85fb2d07-4784-45b8-952e-2b12d61ea024/volumes" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.551481 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4456e30-3eb3-4e1a-b22d-8888babf06a9" path="/var/lib/kubelet/pods/a4456e30-3eb3-4e1a-b22d-8888babf06a9/volumes" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.552053 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad6ce9b2-60bf-492e-b583-9147a171e7bd" path="/var/lib/kubelet/pods/ad6ce9b2-60bf-492e-b583-9147a171e7bd/volumes" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.744819 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2xq2p"] Oct 03 08:44:46 crc kubenswrapper[4899]: E1003 08:44:46.745026 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49764d57-9b3d-4097-a2f4-08a363c6a25f" containerName="registry-server" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.745037 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="49764d57-9b3d-4097-a2f4-08a363c6a25f" containerName="registry-server" Oct 03 08:44:46 crc kubenswrapper[4899]: E1003 08:44:46.745051 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f" containerName="extract-content" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.745058 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f" containerName="extract-content" Oct 03 08:44:46 crc kubenswrapper[4899]: E1003 08:44:46.745068 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad6ce9b2-60bf-492e-b583-9147a171e7bd" containerName="extract-content" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.745074 
4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad6ce9b2-60bf-492e-b583-9147a171e7bd" containerName="extract-content" Oct 03 08:44:46 crc kubenswrapper[4899]: E1003 08:44:46.745081 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85fb2d07-4784-45b8-952e-2b12d61ea024" containerName="registry-server" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.745087 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="85fb2d07-4784-45b8-952e-2b12d61ea024" containerName="registry-server" Oct 03 08:44:46 crc kubenswrapper[4899]: E1003 08:44:46.745096 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49764d57-9b3d-4097-a2f4-08a363c6a25f" containerName="extract-utilities" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.745102 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="49764d57-9b3d-4097-a2f4-08a363c6a25f" containerName="extract-utilities" Oct 03 08:44:46 crc kubenswrapper[4899]: E1003 08:44:46.745110 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f" containerName="extract-utilities" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.745116 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f" containerName="extract-utilities" Oct 03 08:44:46 crc kubenswrapper[4899]: E1003 08:44:46.745123 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85fb2d07-4784-45b8-952e-2b12d61ea024" containerName="extract-utilities" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.745129 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="85fb2d07-4784-45b8-952e-2b12d61ea024" containerName="extract-utilities" Oct 03 08:44:46 crc kubenswrapper[4899]: E1003 08:44:46.745135 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad6ce9b2-60bf-492e-b583-9147a171e7bd" containerName="registry-server" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.745140 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad6ce9b2-60bf-492e-b583-9147a171e7bd" containerName="registry-server" Oct 03 08:44:46 crc kubenswrapper[4899]: E1003 08:44:46.745149 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85fb2d07-4784-45b8-952e-2b12d61ea024" containerName="extract-content" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.745155 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="85fb2d07-4784-45b8-952e-2b12d61ea024" containerName="extract-content" Oct 03 08:44:46 crc kubenswrapper[4899]: E1003 08:44:46.745162 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f" containerName="registry-server" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.745167 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f" containerName="registry-server" Oct 03 08:44:46 crc kubenswrapper[4899]: E1003 08:44:46.745176 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49764d57-9b3d-4097-a2f4-08a363c6a25f" containerName="extract-content" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.745182 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="49764d57-9b3d-4097-a2f4-08a363c6a25f" containerName="extract-content" Oct 03 08:44:46 crc kubenswrapper[4899]: E1003 08:44:46.745191 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4456e30-3eb3-4e1a-b22d-8888babf06a9" containerName="marketplace-operator" Oct 03 08:44:46 crc 
kubenswrapper[4899]: I1003 08:44:46.745198 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4456e30-3eb3-4e1a-b22d-8888babf06a9" containerName="marketplace-operator" Oct 03 08:44:46 crc kubenswrapper[4899]: E1003 08:44:46.745206 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad6ce9b2-60bf-492e-b583-9147a171e7bd" containerName="extract-utilities" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.745212 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad6ce9b2-60bf-492e-b583-9147a171e7bd" containerName="extract-utilities" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.745289 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4456e30-3eb3-4e1a-b22d-8888babf06a9" containerName="marketplace-operator" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.745299 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="49764d57-9b3d-4097-a2f4-08a363c6a25f" containerName="registry-server" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.745310 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad6ce9b2-60bf-492e-b583-9147a171e7bd" containerName="registry-server" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.745319 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="85fb2d07-4784-45b8-952e-2b12d61ea024" containerName="registry-server" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.745326 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="4cc805c9-c0c8-4a0c-b154-cf0a5c01a62f" containerName="registry-server" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.746003 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2xq2p" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.748079 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.757706 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2xq2p"] Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.872131 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8cl5\" (UniqueName: \"kubernetes.io/projected/3e316fba-9f8d-4fb6-9adf-0c8842bdf476-kube-api-access-q8cl5\") pod \"certified-operators-2xq2p\" (UID: \"3e316fba-9f8d-4fb6-9adf-0c8842bdf476\") " pod="openshift-marketplace/certified-operators-2xq2p" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.872200 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e316fba-9f8d-4fb6-9adf-0c8842bdf476-utilities\") pod \"certified-operators-2xq2p\" (UID: \"3e316fba-9f8d-4fb6-9adf-0c8842bdf476\") " pod="openshift-marketplace/certified-operators-2xq2p" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.872314 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e316fba-9f8d-4fb6-9adf-0c8842bdf476-catalog-content\") pod \"certified-operators-2xq2p\" (UID: \"3e316fba-9f8d-4fb6-9adf-0c8842bdf476\") " pod="openshift-marketplace/certified-operators-2xq2p" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.947261 4899 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/community-operators-cpks6"] Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.948252 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cpks6" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.950062 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.955462 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cpks6"] Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.973572 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8cl5\" (UniqueName: \"kubernetes.io/projected/3e316fba-9f8d-4fb6-9adf-0c8842bdf476-kube-api-access-q8cl5\") pod \"certified-operators-2xq2p\" (UID: \"3e316fba-9f8d-4fb6-9adf-0c8842bdf476\") " pod="openshift-marketplace/certified-operators-2xq2p" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.973682 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e316fba-9f8d-4fb6-9adf-0c8842bdf476-utilities\") pod \"certified-operators-2xq2p\" (UID: \"3e316fba-9f8d-4fb6-9adf-0c8842bdf476\") " pod="openshift-marketplace/certified-operators-2xq2p" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.973745 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e316fba-9f8d-4fb6-9adf-0c8842bdf476-catalog-content\") pod \"certified-operators-2xq2p\" (UID: \"3e316fba-9f8d-4fb6-9adf-0c8842bdf476\") " pod="openshift-marketplace/certified-operators-2xq2p" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.974146 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e316fba-9f8d-4fb6-9adf-0c8842bdf476-utilities\") pod \"certified-operators-2xq2p\" (UID: \"3e316fba-9f8d-4fb6-9adf-0c8842bdf476\") " pod="openshift-marketplace/certified-operators-2xq2p" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.975142 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e316fba-9f8d-4fb6-9adf-0c8842bdf476-catalog-content\") pod \"certified-operators-2xq2p\" (UID: \"3e316fba-9f8d-4fb6-9adf-0c8842bdf476\") " pod="openshift-marketplace/certified-operators-2xq2p" Oct 03 08:44:46 crc kubenswrapper[4899]: I1003 08:44:46.993122 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8cl5\" (UniqueName: \"kubernetes.io/projected/3e316fba-9f8d-4fb6-9adf-0c8842bdf476-kube-api-access-q8cl5\") pod \"certified-operators-2xq2p\" (UID: \"3e316fba-9f8d-4fb6-9adf-0c8842bdf476\") " pod="openshift-marketplace/certified-operators-2xq2p" Oct 03 08:44:47 crc kubenswrapper[4899]: I1003 08:44:47.074309 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2xq2p" Oct 03 08:44:47 crc kubenswrapper[4899]: I1003 08:44:47.076146 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c0c4dce-aba0-4bf6-95bf-a513344e0740-catalog-content\") pod \"community-operators-cpks6\" (UID: \"2c0c4dce-aba0-4bf6-95bf-a513344e0740\") " pod="openshift-marketplace/community-operators-cpks6" Oct 03 08:44:47 crc kubenswrapper[4899]: I1003 08:44:47.076207 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c0c4dce-aba0-4bf6-95bf-a513344e0740-utilities\") pod \"community-operators-cpks6\" (UID: \"2c0c4dce-aba0-4bf6-95bf-a513344e0740\") " pod="openshift-marketplace/community-operators-cpks6" Oct 03 08:44:47 crc kubenswrapper[4899]: I1003 08:44:47.076231 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvcbz\" (UniqueName: \"kubernetes.io/projected/2c0c4dce-aba0-4bf6-95bf-a513344e0740-kube-api-access-bvcbz\") pod \"community-operators-cpks6\" (UID: \"2c0c4dce-aba0-4bf6-95bf-a513344e0740\") " pod="openshift-marketplace/community-operators-cpks6" Oct 03 08:44:47 crc kubenswrapper[4899]: I1003 08:44:47.177214 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c0c4dce-aba0-4bf6-95bf-a513344e0740-utilities\") pod \"community-operators-cpks6\" (UID: \"2c0c4dce-aba0-4bf6-95bf-a513344e0740\") " pod="openshift-marketplace/community-operators-cpks6" Oct 03 08:44:47 crc kubenswrapper[4899]: I1003 08:44:47.177252 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvcbz\" (UniqueName: \"kubernetes.io/projected/2c0c4dce-aba0-4bf6-95bf-a513344e0740-kube-api-access-bvcbz\") pod \"community-operators-cpks6\" (UID: \"2c0c4dce-aba0-4bf6-95bf-a513344e0740\") " pod="openshift-marketplace/community-operators-cpks6" Oct 03 08:44:47 crc kubenswrapper[4899]: I1003 08:44:47.177341 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c0c4dce-aba0-4bf6-95bf-a513344e0740-catalog-content\") pod \"community-operators-cpks6\" (UID: \"2c0c4dce-aba0-4bf6-95bf-a513344e0740\") " pod="openshift-marketplace/community-operators-cpks6" Oct 03 08:44:47 crc kubenswrapper[4899]: I1003 08:44:47.177915 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c0c4dce-aba0-4bf6-95bf-a513344e0740-utilities\") pod \"community-operators-cpks6\" (UID: \"2c0c4dce-aba0-4bf6-95bf-a513344e0740\") " pod="openshift-marketplace/community-operators-cpks6" Oct 03 08:44:47 crc kubenswrapper[4899]: I1003 08:44:47.178022 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c0c4dce-aba0-4bf6-95bf-a513344e0740-catalog-content\") pod \"community-operators-cpks6\" (UID: \"2c0c4dce-aba0-4bf6-95bf-a513344e0740\") " pod="openshift-marketplace/community-operators-cpks6" Oct 03 08:44:47 crc kubenswrapper[4899]: I1003 08:44:47.201376 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvcbz\" (UniqueName: \"kubernetes.io/projected/2c0c4dce-aba0-4bf6-95bf-a513344e0740-kube-api-access-bvcbz\") pod 
\"community-operators-cpks6\" (UID: \"2c0c4dce-aba0-4bf6-95bf-a513344e0740\") " pod="openshift-marketplace/community-operators-cpks6" Oct 03 08:44:47 crc kubenswrapper[4899]: I1003 08:44:47.265142 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cpks6" Oct 03 08:44:47 crc kubenswrapper[4899]: I1003 08:44:47.271009 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2xq2p"] Oct 03 08:44:47 crc kubenswrapper[4899]: W1003 08:44:47.282990 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3e316fba_9f8d_4fb6_9adf_0c8842bdf476.slice/crio-3a23f0d5c25953f7304232c93877c1201eebe57a0ff55ad76b7c26d5a537d763 WatchSource:0}: Error finding container 3a23f0d5c25953f7304232c93877c1201eebe57a0ff55ad76b7c26d5a537d763: Status 404 returned error can't find the container with id 3a23f0d5c25953f7304232c93877c1201eebe57a0ff55ad76b7c26d5a537d763 Oct 03 08:44:47 crc kubenswrapper[4899]: I1003 08:44:47.447531 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cpks6"] Oct 03 08:44:47 crc kubenswrapper[4899]: W1003 08:44:47.500247 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2c0c4dce_aba0_4bf6_95bf_a513344e0740.slice/crio-56fed69288c864275088a1ba487171c04c9fcc20fb7fb56212c338cced01c057 WatchSource:0}: Error finding container 56fed69288c864275088a1ba487171c04c9fcc20fb7fb56212c338cced01c057: Status 404 returned error can't find the container with id 56fed69288c864275088a1ba487171c04c9fcc20fb7fb56212c338cced01c057 Oct 03 08:44:47 crc kubenswrapper[4899]: I1003 08:44:47.506299 4899 generic.go:334] "Generic (PLEG): container finished" podID="3e316fba-9f8d-4fb6-9adf-0c8842bdf476" containerID="e2f963681cb9c7884d54872f0b9f9cf2e7736d55ad4a6537611fe31392ca01b4" exitCode=0 Oct 03 08:44:47 crc kubenswrapper[4899]: I1003 08:44:47.506451 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2xq2p" event={"ID":"3e316fba-9f8d-4fb6-9adf-0c8842bdf476","Type":"ContainerDied","Data":"e2f963681cb9c7884d54872f0b9f9cf2e7736d55ad4a6537611fe31392ca01b4"} Oct 03 08:44:47 crc kubenswrapper[4899]: I1003 08:44:47.506497 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2xq2p" event={"ID":"3e316fba-9f8d-4fb6-9adf-0c8842bdf476","Type":"ContainerStarted","Data":"3a23f0d5c25953f7304232c93877c1201eebe57a0ff55ad76b7c26d5a537d763"} Oct 03 08:44:48 crc kubenswrapper[4899]: I1003 08:44:48.513313 4899 generic.go:334] "Generic (PLEG): container finished" podID="3e316fba-9f8d-4fb6-9adf-0c8842bdf476" containerID="ef09e493193713943ebfc6dba0a14243ad2ccd48284b521a04df9931c7339af0" exitCode=0 Oct 03 08:44:48 crc kubenswrapper[4899]: I1003 08:44:48.513433 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2xq2p" event={"ID":"3e316fba-9f8d-4fb6-9adf-0c8842bdf476","Type":"ContainerDied","Data":"ef09e493193713943ebfc6dba0a14243ad2ccd48284b521a04df9931c7339af0"} Oct 03 08:44:48 crc kubenswrapper[4899]: I1003 08:44:48.515650 4899 generic.go:334] "Generic (PLEG): container finished" podID="2c0c4dce-aba0-4bf6-95bf-a513344e0740" containerID="e170abda9552febfafb84ba9f2a689f2d17f17ee0a71beae2ab438fb9fff54c2" exitCode=0 Oct 03 08:44:48 crc kubenswrapper[4899]: I1003 08:44:48.515735 4899 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cpks6" event={"ID":"2c0c4dce-aba0-4bf6-95bf-a513344e0740","Type":"ContainerDied","Data":"e170abda9552febfafb84ba9f2a689f2d17f17ee0a71beae2ab438fb9fff54c2"} Oct 03 08:44:48 crc kubenswrapper[4899]: I1003 08:44:48.515788 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cpks6" event={"ID":"2c0c4dce-aba0-4bf6-95bf-a513344e0740","Type":"ContainerStarted","Data":"56fed69288c864275088a1ba487171c04c9fcc20fb7fb56212c338cced01c057"} Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.149429 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8bmxl"] Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.150967 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8bmxl" Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.152934 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.161394 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8bmxl"] Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.302005 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpxb8\" (UniqueName: \"kubernetes.io/projected/16e11948-6a93-444d-b9f9-c8f60100475c-kube-api-access-jpxb8\") pod \"redhat-marketplace-8bmxl\" (UID: \"16e11948-6a93-444d-b9f9-c8f60100475c\") " pod="openshift-marketplace/redhat-marketplace-8bmxl" Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.302060 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/16e11948-6a93-444d-b9f9-c8f60100475c-utilities\") pod \"redhat-marketplace-8bmxl\" (UID: \"16e11948-6a93-444d-b9f9-c8f60100475c\") " pod="openshift-marketplace/redhat-marketplace-8bmxl" Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.302140 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/16e11948-6a93-444d-b9f9-c8f60100475c-catalog-content\") pod \"redhat-marketplace-8bmxl\" (UID: \"16e11948-6a93-444d-b9f9-c8f60100475c\") " pod="openshift-marketplace/redhat-marketplace-8bmxl" Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.348363 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hz84h"] Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.354442 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hz84h" Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.354627 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hz84h"] Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.356628 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.403586 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8d22cd4-b04a-4b88-acd8-0949b94edd47-utilities\") pod \"redhat-operators-hz84h\" (UID: \"a8d22cd4-b04a-4b88-acd8-0949b94edd47\") " pod="openshift-marketplace/redhat-operators-hz84h" Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.403633 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpxb8\" (UniqueName: \"kubernetes.io/projected/16e11948-6a93-444d-b9f9-c8f60100475c-kube-api-access-jpxb8\") pod \"redhat-marketplace-8bmxl\" (UID: \"16e11948-6a93-444d-b9f9-c8f60100475c\") " pod="openshift-marketplace/redhat-marketplace-8bmxl" Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.403659 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/16e11948-6a93-444d-b9f9-c8f60100475c-utilities\") pod \"redhat-marketplace-8bmxl\" (UID: \"16e11948-6a93-444d-b9f9-c8f60100475c\") " pod="openshift-marketplace/redhat-marketplace-8bmxl" Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.403691 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5pfh\" (UniqueName: \"kubernetes.io/projected/a8d22cd4-b04a-4b88-acd8-0949b94edd47-kube-api-access-z5pfh\") pod \"redhat-operators-hz84h\" (UID: \"a8d22cd4-b04a-4b88-acd8-0949b94edd47\") " pod="openshift-marketplace/redhat-operators-hz84h" Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.403709 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/16e11948-6a93-444d-b9f9-c8f60100475c-catalog-content\") pod \"redhat-marketplace-8bmxl\" (UID: \"16e11948-6a93-444d-b9f9-c8f60100475c\") " pod="openshift-marketplace/redhat-marketplace-8bmxl" Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.403723 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8d22cd4-b04a-4b88-acd8-0949b94edd47-catalog-content\") pod \"redhat-operators-hz84h\" (UID: \"a8d22cd4-b04a-4b88-acd8-0949b94edd47\") " pod="openshift-marketplace/redhat-operators-hz84h" Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.404214 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/16e11948-6a93-444d-b9f9-c8f60100475c-utilities\") pod \"redhat-marketplace-8bmxl\" (UID: \"16e11948-6a93-444d-b9f9-c8f60100475c\") " pod="openshift-marketplace/redhat-marketplace-8bmxl" Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.404714 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/16e11948-6a93-444d-b9f9-c8f60100475c-catalog-content\") pod \"redhat-marketplace-8bmxl\" (UID: 
\"16e11948-6a93-444d-b9f9-c8f60100475c\") " pod="openshift-marketplace/redhat-marketplace-8bmxl" Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.423697 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpxb8\" (UniqueName: \"kubernetes.io/projected/16e11948-6a93-444d-b9f9-c8f60100475c-kube-api-access-jpxb8\") pod \"redhat-marketplace-8bmxl\" (UID: \"16e11948-6a93-444d-b9f9-c8f60100475c\") " pod="openshift-marketplace/redhat-marketplace-8bmxl" Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.481612 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8bmxl" Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.506513 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8d22cd4-b04a-4b88-acd8-0949b94edd47-utilities\") pod \"redhat-operators-hz84h\" (UID: \"a8d22cd4-b04a-4b88-acd8-0949b94edd47\") " pod="openshift-marketplace/redhat-operators-hz84h" Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.506633 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5pfh\" (UniqueName: \"kubernetes.io/projected/a8d22cd4-b04a-4b88-acd8-0949b94edd47-kube-api-access-z5pfh\") pod \"redhat-operators-hz84h\" (UID: \"a8d22cd4-b04a-4b88-acd8-0949b94edd47\") " pod="openshift-marketplace/redhat-operators-hz84h" Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.506667 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8d22cd4-b04a-4b88-acd8-0949b94edd47-catalog-content\") pod \"redhat-operators-hz84h\" (UID: \"a8d22cd4-b04a-4b88-acd8-0949b94edd47\") " pod="openshift-marketplace/redhat-operators-hz84h" Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.507176 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8d22cd4-b04a-4b88-acd8-0949b94edd47-catalog-content\") pod \"redhat-operators-hz84h\" (UID: \"a8d22cd4-b04a-4b88-acd8-0949b94edd47\") " pod="openshift-marketplace/redhat-operators-hz84h" Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.507499 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8d22cd4-b04a-4b88-acd8-0949b94edd47-utilities\") pod \"redhat-operators-hz84h\" (UID: \"a8d22cd4-b04a-4b88-acd8-0949b94edd47\") " pod="openshift-marketplace/redhat-operators-hz84h" Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.527358 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2xq2p" event={"ID":"3e316fba-9f8d-4fb6-9adf-0c8842bdf476","Type":"ContainerStarted","Data":"a21e5546d9a4275e6bf8b9b9c0a2dda95ef7831432d358510f43c02a3a5650b7"} Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.531909 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cpks6" event={"ID":"2c0c4dce-aba0-4bf6-95bf-a513344e0740","Type":"ContainerStarted","Data":"fa1a80a43bb844ba17351623dd4eca03e34f2e05fb38156df5784386719236d4"} Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.533130 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5pfh\" (UniqueName: \"kubernetes.io/projected/a8d22cd4-b04a-4b88-acd8-0949b94edd47-kube-api-access-z5pfh\") pod 
\"redhat-operators-hz84h\" (UID: \"a8d22cd4-b04a-4b88-acd8-0949b94edd47\") " pod="openshift-marketplace/redhat-operators-hz84h" Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.545625 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2xq2p" podStartSLOduration=1.844147117 podStartE2EDuration="3.54560504s" podCreationTimestamp="2025-10-03 08:44:46 +0000 UTC" firstStartedPulling="2025-10-03 08:44:47.508302383 +0000 UTC m=+261.615787336" lastFinishedPulling="2025-10-03 08:44:49.209760296 +0000 UTC m=+263.317245259" observedRunningTime="2025-10-03 08:44:49.544589567 +0000 UTC m=+263.652074540" watchObservedRunningTime="2025-10-03 08:44:49.54560504 +0000 UTC m=+263.653089993" Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.672397 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hz84h" Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.746225 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8bmxl"] Oct 03 08:44:49 crc kubenswrapper[4899]: W1003 08:44:49.755078 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod16e11948_6a93_444d_b9f9_c8f60100475c.slice/crio-1a2afa1d334662fdad2b0a3c9bd37a33f703a949d06a69d4c5de0ed0524086f9 WatchSource:0}: Error finding container 1a2afa1d334662fdad2b0a3c9bd37a33f703a949d06a69d4c5de0ed0524086f9: Status 404 returned error can't find the container with id 1a2afa1d334662fdad2b0a3c9bd37a33f703a949d06a69d4c5de0ed0524086f9 Oct 03 08:44:49 crc kubenswrapper[4899]: I1003 08:44:49.909920 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hz84h"] Oct 03 08:44:49 crc kubenswrapper[4899]: W1003 08:44:49.919866 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda8d22cd4_b04a_4b88_acd8_0949b94edd47.slice/crio-7235ed18e8c2d9a107046eab286507b5c4a3f628a16f6f731d2409efd996992b WatchSource:0}: Error finding container 7235ed18e8c2d9a107046eab286507b5c4a3f628a16f6f731d2409efd996992b: Status 404 returned error can't find the container with id 7235ed18e8c2d9a107046eab286507b5c4a3f628a16f6f731d2409efd996992b Oct 03 08:44:50 crc kubenswrapper[4899]: I1003 08:44:50.537935 4899 generic.go:334] "Generic (PLEG): container finished" podID="16e11948-6a93-444d-b9f9-c8f60100475c" containerID="41d4c2f540a4e619c838792c0392f1b07c5821e1b72991e23a87751d446bdde4" exitCode=0 Oct 03 08:44:50 crc kubenswrapper[4899]: I1003 08:44:50.538031 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8bmxl" event={"ID":"16e11948-6a93-444d-b9f9-c8f60100475c","Type":"ContainerDied","Data":"41d4c2f540a4e619c838792c0392f1b07c5821e1b72991e23a87751d446bdde4"} Oct 03 08:44:50 crc kubenswrapper[4899]: I1003 08:44:50.538302 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8bmxl" event={"ID":"16e11948-6a93-444d-b9f9-c8f60100475c","Type":"ContainerStarted","Data":"1a2afa1d334662fdad2b0a3c9bd37a33f703a949d06a69d4c5de0ed0524086f9"} Oct 03 08:44:50 crc kubenswrapper[4899]: I1003 08:44:50.541132 4899 generic.go:334] "Generic (PLEG): container finished" podID="a8d22cd4-b04a-4b88-acd8-0949b94edd47" containerID="1071f1e20ea9b7750d3c14a653604c9fa46dfc6598b15c646d1c63a727c29c51" exitCode=0 Oct 03 08:44:50 crc 
kubenswrapper[4899]: I1003 08:44:50.541214 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hz84h" event={"ID":"a8d22cd4-b04a-4b88-acd8-0949b94edd47","Type":"ContainerDied","Data":"1071f1e20ea9b7750d3c14a653604c9fa46dfc6598b15c646d1c63a727c29c51"} Oct 03 08:44:50 crc kubenswrapper[4899]: I1003 08:44:50.541261 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hz84h" event={"ID":"a8d22cd4-b04a-4b88-acd8-0949b94edd47","Type":"ContainerStarted","Data":"7235ed18e8c2d9a107046eab286507b5c4a3f628a16f6f731d2409efd996992b"} Oct 03 08:44:50 crc kubenswrapper[4899]: I1003 08:44:50.545577 4899 generic.go:334] "Generic (PLEG): container finished" podID="2c0c4dce-aba0-4bf6-95bf-a513344e0740" containerID="fa1a80a43bb844ba17351623dd4eca03e34f2e05fb38156df5784386719236d4" exitCode=0 Oct 03 08:44:50 crc kubenswrapper[4899]: I1003 08:44:50.546277 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cpks6" event={"ID":"2c0c4dce-aba0-4bf6-95bf-a513344e0740","Type":"ContainerDied","Data":"fa1a80a43bb844ba17351623dd4eca03e34f2e05fb38156df5784386719236d4"} Oct 03 08:44:51 crc kubenswrapper[4899]: I1003 08:44:51.555370 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hz84h" event={"ID":"a8d22cd4-b04a-4b88-acd8-0949b94edd47","Type":"ContainerStarted","Data":"300aacfc0d389ef9ad03d56411cd57a23a139696527116a6f9861215748a1b04"} Oct 03 08:44:51 crc kubenswrapper[4899]: I1003 08:44:51.558032 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cpks6" event={"ID":"2c0c4dce-aba0-4bf6-95bf-a513344e0740","Type":"ContainerStarted","Data":"72abfbac36734d4caa0cab4735f19cd856c8b24fea2a2eb319606390defcc618"} Oct 03 08:44:51 crc kubenswrapper[4899]: I1003 08:44:51.586074 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-cpks6" podStartSLOduration=2.925112686 podStartE2EDuration="5.5860522s" podCreationTimestamp="2025-10-03 08:44:46 +0000 UTC" firstStartedPulling="2025-10-03 08:44:48.518345082 +0000 UTC m=+262.625830035" lastFinishedPulling="2025-10-03 08:44:51.179284606 +0000 UTC m=+265.286769549" observedRunningTime="2025-10-03 08:44:51.585076899 +0000 UTC m=+265.692561852" watchObservedRunningTime="2025-10-03 08:44:51.5860522 +0000 UTC m=+265.693537153" Oct 03 08:44:52 crc kubenswrapper[4899]: I1003 08:44:52.565139 4899 generic.go:334] "Generic (PLEG): container finished" podID="a8d22cd4-b04a-4b88-acd8-0949b94edd47" containerID="300aacfc0d389ef9ad03d56411cd57a23a139696527116a6f9861215748a1b04" exitCode=0 Oct 03 08:44:52 crc kubenswrapper[4899]: I1003 08:44:52.567957 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hz84h" event={"ID":"a8d22cd4-b04a-4b88-acd8-0949b94edd47","Type":"ContainerDied","Data":"300aacfc0d389ef9ad03d56411cd57a23a139696527116a6f9861215748a1b04"} Oct 03 08:44:53 crc kubenswrapper[4899]: I1003 08:44:53.572012 4899 generic.go:334] "Generic (PLEG): container finished" podID="16e11948-6a93-444d-b9f9-c8f60100475c" containerID="7f9bc32cef89096ca3bcb47893d9bea0dab322a722362f23a8b24cc876c19ca1" exitCode=0 Oct 03 08:44:53 crc kubenswrapper[4899]: I1003 08:44:53.572087 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8bmxl" 
event={"ID":"16e11948-6a93-444d-b9f9-c8f60100475c","Type":"ContainerDied","Data":"7f9bc32cef89096ca3bcb47893d9bea0dab322a722362f23a8b24cc876c19ca1"} Oct 03 08:44:53 crc kubenswrapper[4899]: I1003 08:44:53.577371 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hz84h" event={"ID":"a8d22cd4-b04a-4b88-acd8-0949b94edd47","Type":"ContainerStarted","Data":"6f1020e0a9911980b8457201889fa15b7ee974d24921cfc614d9d4d092b12e34"} Oct 03 08:44:53 crc kubenswrapper[4899]: I1003 08:44:53.607779 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hz84h" podStartSLOduration=1.89319697 podStartE2EDuration="4.607756534s" podCreationTimestamp="2025-10-03 08:44:49 +0000 UTC" firstStartedPulling="2025-10-03 08:44:50.542625949 +0000 UTC m=+264.650110902" lastFinishedPulling="2025-10-03 08:44:53.257185513 +0000 UTC m=+267.364670466" observedRunningTime="2025-10-03 08:44:53.605635508 +0000 UTC m=+267.713120461" watchObservedRunningTime="2025-10-03 08:44:53.607756534 +0000 UTC m=+267.715241507" Oct 03 08:44:54 crc kubenswrapper[4899]: I1003 08:44:54.587316 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8bmxl" event={"ID":"16e11948-6a93-444d-b9f9-c8f60100475c","Type":"ContainerStarted","Data":"89009e4c3b02286ee64181ea7bc8746bf2753c4fc6905248f1a9d6497fc3a39f"} Oct 03 08:44:54 crc kubenswrapper[4899]: I1003 08:44:54.604051 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8bmxl" podStartSLOduration=1.9914271860000001 podStartE2EDuration="5.60403433s" podCreationTimestamp="2025-10-03 08:44:49 +0000 UTC" firstStartedPulling="2025-10-03 08:44:50.539946454 +0000 UTC m=+264.647431407" lastFinishedPulling="2025-10-03 08:44:54.152553598 +0000 UTC m=+268.260038551" observedRunningTime="2025-10-03 08:44:54.601687516 +0000 UTC m=+268.709172489" watchObservedRunningTime="2025-10-03 08:44:54.60403433 +0000 UTC m=+268.711519283" Oct 03 08:44:57 crc kubenswrapper[4899]: I1003 08:44:57.075235 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2xq2p" Oct 03 08:44:57 crc kubenswrapper[4899]: I1003 08:44:57.075309 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2xq2p" Oct 03 08:44:57 crc kubenswrapper[4899]: I1003 08:44:57.117290 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2xq2p" Oct 03 08:44:57 crc kubenswrapper[4899]: I1003 08:44:57.265874 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-cpks6" Oct 03 08:44:57 crc kubenswrapper[4899]: I1003 08:44:57.266233 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-cpks6" Oct 03 08:44:57 crc kubenswrapper[4899]: I1003 08:44:57.306941 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-cpks6" Oct 03 08:44:57 crc kubenswrapper[4899]: I1003 08:44:57.639928 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2xq2p" Oct 03 08:44:57 crc kubenswrapper[4899]: I1003 08:44:57.640242 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/community-operators-cpks6" Oct 03 08:44:59 crc kubenswrapper[4899]: I1003 08:44:59.481767 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8bmxl" Oct 03 08:44:59 crc kubenswrapper[4899]: I1003 08:44:59.482933 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8bmxl" Oct 03 08:44:59 crc kubenswrapper[4899]: I1003 08:44:59.526446 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8bmxl" Oct 03 08:44:59 crc kubenswrapper[4899]: I1003 08:44:59.647291 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8bmxl" Oct 03 08:44:59 crc kubenswrapper[4899]: I1003 08:44:59.673120 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hz84h" Oct 03 08:44:59 crc kubenswrapper[4899]: I1003 08:44:59.673174 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hz84h" Oct 03 08:44:59 crc kubenswrapper[4899]: I1003 08:44:59.707640 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hz84h" Oct 03 08:45:00 crc kubenswrapper[4899]: I1003 08:45:00.147140 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324685-dcr2l"] Oct 03 08:45:00 crc kubenswrapper[4899]: I1003 08:45:00.147845 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324685-dcr2l" Oct 03 08:45:00 crc kubenswrapper[4899]: I1003 08:45:00.149565 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 03 08:45:00 crc kubenswrapper[4899]: I1003 08:45:00.151905 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 03 08:45:00 crc kubenswrapper[4899]: I1003 08:45:00.192267 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324685-dcr2l"] Oct 03 08:45:00 crc kubenswrapper[4899]: I1003 08:45:00.242949 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1514d516-ecf8-473e-814a-5b675c7b23bc-config-volume\") pod \"collect-profiles-29324685-dcr2l\" (UID: \"1514d516-ecf8-473e-814a-5b675c7b23bc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324685-dcr2l" Oct 03 08:45:00 crc kubenswrapper[4899]: I1003 08:45:00.243124 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1514d516-ecf8-473e-814a-5b675c7b23bc-secret-volume\") pod \"collect-profiles-29324685-dcr2l\" (UID: \"1514d516-ecf8-473e-814a-5b675c7b23bc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324685-dcr2l" Oct 03 08:45:00 crc kubenswrapper[4899]: I1003 08:45:00.243267 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zg4b5\" (UniqueName: \"kubernetes.io/projected/1514d516-ecf8-473e-814a-5b675c7b23bc-kube-api-access-zg4b5\") pod 
\"collect-profiles-29324685-dcr2l\" (UID: \"1514d516-ecf8-473e-814a-5b675c7b23bc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324685-dcr2l" Oct 03 08:45:00 crc kubenswrapper[4899]: I1003 08:45:00.344565 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zg4b5\" (UniqueName: \"kubernetes.io/projected/1514d516-ecf8-473e-814a-5b675c7b23bc-kube-api-access-zg4b5\") pod \"collect-profiles-29324685-dcr2l\" (UID: \"1514d516-ecf8-473e-814a-5b675c7b23bc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324685-dcr2l" Oct 03 08:45:00 crc kubenswrapper[4899]: I1003 08:45:00.344647 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1514d516-ecf8-473e-814a-5b675c7b23bc-config-volume\") pod \"collect-profiles-29324685-dcr2l\" (UID: \"1514d516-ecf8-473e-814a-5b675c7b23bc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324685-dcr2l" Oct 03 08:45:00 crc kubenswrapper[4899]: I1003 08:45:00.344692 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1514d516-ecf8-473e-814a-5b675c7b23bc-secret-volume\") pod \"collect-profiles-29324685-dcr2l\" (UID: \"1514d516-ecf8-473e-814a-5b675c7b23bc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324685-dcr2l" Oct 03 08:45:00 crc kubenswrapper[4899]: I1003 08:45:00.345841 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1514d516-ecf8-473e-814a-5b675c7b23bc-config-volume\") pod \"collect-profiles-29324685-dcr2l\" (UID: \"1514d516-ecf8-473e-814a-5b675c7b23bc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324685-dcr2l" Oct 03 08:45:00 crc kubenswrapper[4899]: I1003 08:45:00.354572 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1514d516-ecf8-473e-814a-5b675c7b23bc-secret-volume\") pod \"collect-profiles-29324685-dcr2l\" (UID: \"1514d516-ecf8-473e-814a-5b675c7b23bc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324685-dcr2l" Oct 03 08:45:00 crc kubenswrapper[4899]: I1003 08:45:00.363687 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zg4b5\" (UniqueName: \"kubernetes.io/projected/1514d516-ecf8-473e-814a-5b675c7b23bc-kube-api-access-zg4b5\") pod \"collect-profiles-29324685-dcr2l\" (UID: \"1514d516-ecf8-473e-814a-5b675c7b23bc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324685-dcr2l" Oct 03 08:45:00 crc kubenswrapper[4899]: I1003 08:45:00.466950 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324685-dcr2l" Oct 03 08:45:00 crc kubenswrapper[4899]: I1003 08:45:00.635400 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324685-dcr2l"] Oct 03 08:45:00 crc kubenswrapper[4899]: I1003 08:45:00.658093 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hz84h" Oct 03 08:45:01 crc kubenswrapper[4899]: I1003 08:45:01.625776 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324685-dcr2l" event={"ID":"1514d516-ecf8-473e-814a-5b675c7b23bc","Type":"ContainerStarted","Data":"99c194b1dc58a146ea6f09fab9b7203113df5b20560969a632cf01246655be9b"} Oct 03 08:45:02 crc kubenswrapper[4899]: I1003 08:45:02.631348 4899 generic.go:334] "Generic (PLEG): container finished" podID="1514d516-ecf8-473e-814a-5b675c7b23bc" containerID="6c5ff0f918cb3b89ddbfb13c42e83931df59f27fa30a62f18e1e97e84b539a09" exitCode=0 Oct 03 08:45:02 crc kubenswrapper[4899]: I1003 08:45:02.631390 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324685-dcr2l" event={"ID":"1514d516-ecf8-473e-814a-5b675c7b23bc","Type":"ContainerDied","Data":"6c5ff0f918cb3b89ddbfb13c42e83931df59f27fa30a62f18e1e97e84b539a09"} Oct 03 08:45:03 crc kubenswrapper[4899]: I1003 08:45:03.834446 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324685-dcr2l" Oct 03 08:45:03 crc kubenswrapper[4899]: I1003 08:45:03.993427 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1514d516-ecf8-473e-814a-5b675c7b23bc-secret-volume\") pod \"1514d516-ecf8-473e-814a-5b675c7b23bc\" (UID: \"1514d516-ecf8-473e-814a-5b675c7b23bc\") " Oct 03 08:45:03 crc kubenswrapper[4899]: I1003 08:45:03.993588 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zg4b5\" (UniqueName: \"kubernetes.io/projected/1514d516-ecf8-473e-814a-5b675c7b23bc-kube-api-access-zg4b5\") pod \"1514d516-ecf8-473e-814a-5b675c7b23bc\" (UID: \"1514d516-ecf8-473e-814a-5b675c7b23bc\") " Oct 03 08:45:03 crc kubenswrapper[4899]: I1003 08:45:03.993624 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1514d516-ecf8-473e-814a-5b675c7b23bc-config-volume\") pod \"1514d516-ecf8-473e-814a-5b675c7b23bc\" (UID: \"1514d516-ecf8-473e-814a-5b675c7b23bc\") " Oct 03 08:45:03 crc kubenswrapper[4899]: I1003 08:45:03.994560 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1514d516-ecf8-473e-814a-5b675c7b23bc-config-volume" (OuterVolumeSpecName: "config-volume") pod "1514d516-ecf8-473e-814a-5b675c7b23bc" (UID: "1514d516-ecf8-473e-814a-5b675c7b23bc"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:45:03 crc kubenswrapper[4899]: I1003 08:45:03.999271 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1514d516-ecf8-473e-814a-5b675c7b23bc-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "1514d516-ecf8-473e-814a-5b675c7b23bc" (UID: "1514d516-ecf8-473e-814a-5b675c7b23bc"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:45:04 crc kubenswrapper[4899]: I1003 08:45:04.000053 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1514d516-ecf8-473e-814a-5b675c7b23bc-kube-api-access-zg4b5" (OuterVolumeSpecName: "kube-api-access-zg4b5") pod "1514d516-ecf8-473e-814a-5b675c7b23bc" (UID: "1514d516-ecf8-473e-814a-5b675c7b23bc"). InnerVolumeSpecName "kube-api-access-zg4b5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:45:04 crc kubenswrapper[4899]: I1003 08:45:04.095382 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zg4b5\" (UniqueName: \"kubernetes.io/projected/1514d516-ecf8-473e-814a-5b675c7b23bc-kube-api-access-zg4b5\") on node \"crc\" DevicePath \"\"" Oct 03 08:45:04 crc kubenswrapper[4899]: I1003 08:45:04.095421 4899 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1514d516-ecf8-473e-814a-5b675c7b23bc-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 08:45:04 crc kubenswrapper[4899]: I1003 08:45:04.095432 4899 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1514d516-ecf8-473e-814a-5b675c7b23bc-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 03 08:45:04 crc kubenswrapper[4899]: I1003 08:45:04.641828 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324685-dcr2l" event={"ID":"1514d516-ecf8-473e-814a-5b675c7b23bc","Type":"ContainerDied","Data":"99c194b1dc58a146ea6f09fab9b7203113df5b20560969a632cf01246655be9b"} Oct 03 08:45:04 crc kubenswrapper[4899]: I1003 08:45:04.641916 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="99c194b1dc58a146ea6f09fab9b7203113df5b20560969a632cf01246655be9b" Oct 03 08:45:04 crc kubenswrapper[4899]: I1003 08:45:04.641931 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324685-dcr2l" Oct 03 08:46:12 crc kubenswrapper[4899]: I1003 08:46:12.199437 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 08:46:12 crc kubenswrapper[4899]: I1003 08:46:12.200433 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 08:46:42 crc kubenswrapper[4899]: I1003 08:46:42.198276 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 08:46:42 crc kubenswrapper[4899]: I1003 08:46:42.198833 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 08:47:12 crc kubenswrapper[4899]: I1003 08:47:12.198548 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 08:47:12 crc kubenswrapper[4899]: I1003 08:47:12.199109 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 08:47:12 crc kubenswrapper[4899]: I1003 08:47:12.199182 4899 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 08:47:12 crc kubenswrapper[4899]: I1003 08:47:12.199828 4899 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c575796f37c9e9cc7bfd1aa952849ac3387d60b1b8fe207a0308043edd984a52"} pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 08:47:12 crc kubenswrapper[4899]: I1003 08:47:12.199916 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" containerID="cri-o://c575796f37c9e9cc7bfd1aa952849ac3387d60b1b8fe207a0308043edd984a52" gracePeriod=600 Oct 03 08:47:13 crc kubenswrapper[4899]: I1003 08:47:13.285922 4899 generic.go:334] "Generic (PLEG): container finished" podID="3e8a7198-81da-475c-ac88-a460ba4064d1" 
containerID="c575796f37c9e9cc7bfd1aa952849ac3387d60b1b8fe207a0308043edd984a52" exitCode=0 Oct 03 08:47:13 crc kubenswrapper[4899]: I1003 08:47:13.285998 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerDied","Data":"c575796f37c9e9cc7bfd1aa952849ac3387d60b1b8fe207a0308043edd984a52"} Oct 03 08:47:13 crc kubenswrapper[4899]: I1003 08:47:13.286478 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerStarted","Data":"6317475fe02e5878e0a0ad7c510b598f3249a496df5a7826416b0c9a1c2435dc"} Oct 03 08:47:13 crc kubenswrapper[4899]: I1003 08:47:13.286502 4899 scope.go:117] "RemoveContainer" containerID="1fac557f649f2cb3d48178e66223cf1bf7b2eda9b9296a51f3f5ce9ca1cea395" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.430907 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-bk7gk"] Oct 03 08:48:10 crc kubenswrapper[4899]: E1003 08:48:10.431948 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1514d516-ecf8-473e-814a-5b675c7b23bc" containerName="collect-profiles" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.431963 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="1514d516-ecf8-473e-814a-5b675c7b23bc" containerName="collect-profiles" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.432133 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="1514d516-ecf8-473e-814a-5b675c7b23bc" containerName="collect-profiles" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.432683 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.443258 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-bk7gk"] Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.628692 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/22010d0f-44b2-405f-8055-1578313e3442-registry-tls\") pod \"image-registry-66df7c8f76-bk7gk\" (UID: \"22010d0f-44b2-405f-8055-1578313e3442\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.628867 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/22010d0f-44b2-405f-8055-1578313e3442-registry-certificates\") pod \"image-registry-66df7c8f76-bk7gk\" (UID: \"22010d0f-44b2-405f-8055-1578313e3442\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.628891 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/22010d0f-44b2-405f-8055-1578313e3442-installation-pull-secrets\") pod \"image-registry-66df7c8f76-bk7gk\" (UID: \"22010d0f-44b2-405f-8055-1578313e3442\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.628932 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7h4qd\" (UniqueName: \"kubernetes.io/projected/22010d0f-44b2-405f-8055-1578313e3442-kube-api-access-7h4qd\") pod \"image-registry-66df7c8f76-bk7gk\" (UID: \"22010d0f-44b2-405f-8055-1578313e3442\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.628962 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-bk7gk\" (UID: \"22010d0f-44b2-405f-8055-1578313e3442\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.629007 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/22010d0f-44b2-405f-8055-1578313e3442-trusted-ca\") pod \"image-registry-66df7c8f76-bk7gk\" (UID: \"22010d0f-44b2-405f-8055-1578313e3442\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.629037 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/22010d0f-44b2-405f-8055-1578313e3442-ca-trust-extracted\") pod \"image-registry-66df7c8f76-bk7gk\" (UID: \"22010d0f-44b2-405f-8055-1578313e3442\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.629081 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/22010d0f-44b2-405f-8055-1578313e3442-bound-sa-token\") pod \"image-registry-66df7c8f76-bk7gk\" (UID: \"22010d0f-44b2-405f-8055-1578313e3442\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.654484 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-bk7gk\" (UID: \"22010d0f-44b2-405f-8055-1578313e3442\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.730031 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7h4qd\" (UniqueName: \"kubernetes.io/projected/22010d0f-44b2-405f-8055-1578313e3442-kube-api-access-7h4qd\") pod \"image-registry-66df7c8f76-bk7gk\" (UID: \"22010d0f-44b2-405f-8055-1578313e3442\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.730090 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/22010d0f-44b2-405f-8055-1578313e3442-trusted-ca\") pod \"image-registry-66df7c8f76-bk7gk\" (UID: \"22010d0f-44b2-405f-8055-1578313e3442\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.730110 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/22010d0f-44b2-405f-8055-1578313e3442-ca-trust-extracted\") pod \"image-registry-66df7c8f76-bk7gk\" (UID: \"22010d0f-44b2-405f-8055-1578313e3442\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.730140 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/22010d0f-44b2-405f-8055-1578313e3442-bound-sa-token\") pod \"image-registry-66df7c8f76-bk7gk\" (UID: \"22010d0f-44b2-405f-8055-1578313e3442\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.730174 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/22010d0f-44b2-405f-8055-1578313e3442-registry-tls\") pod \"image-registry-66df7c8f76-bk7gk\" (UID: \"22010d0f-44b2-405f-8055-1578313e3442\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.730200 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/22010d0f-44b2-405f-8055-1578313e3442-registry-certificates\") pod \"image-registry-66df7c8f76-bk7gk\" (UID: \"22010d0f-44b2-405f-8055-1578313e3442\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.730220 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/22010d0f-44b2-405f-8055-1578313e3442-installation-pull-secrets\") pod \"image-registry-66df7c8f76-bk7gk\" (UID: \"22010d0f-44b2-405f-8055-1578313e3442\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.730803 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/22010d0f-44b2-405f-8055-1578313e3442-ca-trust-extracted\") pod \"image-registry-66df7c8f76-bk7gk\" (UID: \"22010d0f-44b2-405f-8055-1578313e3442\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.732151 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/22010d0f-44b2-405f-8055-1578313e3442-trusted-ca\") pod \"image-registry-66df7c8f76-bk7gk\" (UID: \"22010d0f-44b2-405f-8055-1578313e3442\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.732747 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/22010d0f-44b2-405f-8055-1578313e3442-registry-certificates\") pod \"image-registry-66df7c8f76-bk7gk\" (UID: \"22010d0f-44b2-405f-8055-1578313e3442\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.738549 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/22010d0f-44b2-405f-8055-1578313e3442-installation-pull-secrets\") pod \"image-registry-66df7c8f76-bk7gk\" (UID: \"22010d0f-44b2-405f-8055-1578313e3442\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.738584 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/22010d0f-44b2-405f-8055-1578313e3442-registry-tls\") pod \"image-registry-66df7c8f76-bk7gk\" (UID: \"22010d0f-44b2-405f-8055-1578313e3442\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.747189 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/22010d0f-44b2-405f-8055-1578313e3442-bound-sa-token\") pod \"image-registry-66df7c8f76-bk7gk\" (UID: \"22010d0f-44b2-405f-8055-1578313e3442\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.748105 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7h4qd\" (UniqueName: \"kubernetes.io/projected/22010d0f-44b2-405f-8055-1578313e3442-kube-api-access-7h4qd\") pod \"image-registry-66df7c8f76-bk7gk\" (UID: \"22010d0f-44b2-405f-8055-1578313e3442\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.804984 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" Oct 03 08:48:10 crc kubenswrapper[4899]: I1003 08:48:10.981649 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-bk7gk"] Oct 03 08:48:11 crc kubenswrapper[4899]: I1003 08:48:11.565524 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" event={"ID":"22010d0f-44b2-405f-8055-1578313e3442","Type":"ContainerStarted","Data":"a92e37caddedd3b067be0dcedce56ab0f12b8c4f8de408f842130d0cc51b9492"} Oct 03 08:48:11 crc kubenswrapper[4899]: I1003 08:48:11.565564 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" event={"ID":"22010d0f-44b2-405f-8055-1578313e3442","Type":"ContainerStarted","Data":"fdc3335da8f1d351ede01a42774487ef24e2db435f3f7d13db5f7025fa2c1da9"} Oct 03 08:48:11 crc kubenswrapper[4899]: I1003 08:48:11.565747 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" Oct 03 08:48:11 crc kubenswrapper[4899]: I1003 08:48:11.588128 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" podStartSLOduration=1.588112444 podStartE2EDuration="1.588112444s" podCreationTimestamp="2025-10-03 08:48:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:48:11.583743788 +0000 UTC m=+465.691228761" watchObservedRunningTime="2025-10-03 08:48:11.588112444 +0000 UTC m=+465.695597397" Oct 03 08:48:30 crc kubenswrapper[4899]: I1003 08:48:30.810576 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-bk7gk" Oct 03 08:48:30 crc kubenswrapper[4899]: I1003 08:48:30.849190 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l5vs7"] Oct 03 08:48:55 crc kubenswrapper[4899]: I1003 08:48:55.927198 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" podUID="aee31d1e-9049-4b14-9544-e26bc3ea2b38" containerName="registry" containerID="cri-o://908a4b69aba1aa56fd8e0d2838bf125308a8a55589a0a3213076b22c27cbb5ed" gracePeriod=30 Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.256304 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.429190 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/aee31d1e-9049-4b14-9544-e26bc3ea2b38-registry-certificates\") pod \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.429243 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/aee31d1e-9049-4b14-9544-e26bc3ea2b38-trusted-ca\") pod \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.429326 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/aee31d1e-9049-4b14-9544-e26bc3ea2b38-registry-tls\") pod \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.429382 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/aee31d1e-9049-4b14-9544-e26bc3ea2b38-ca-trust-extracted\") pod \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.429514 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.429557 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/aee31d1e-9049-4b14-9544-e26bc3ea2b38-installation-pull-secrets\") pod \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.429589 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/aee31d1e-9049-4b14-9544-e26bc3ea2b38-bound-sa-token\") pod \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.429608 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h64pb\" (UniqueName: \"kubernetes.io/projected/aee31d1e-9049-4b14-9544-e26bc3ea2b38-kube-api-access-h64pb\") pod \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\" (UID: \"aee31d1e-9049-4b14-9544-e26bc3ea2b38\") " Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.430614 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aee31d1e-9049-4b14-9544-e26bc3ea2b38-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "aee31d1e-9049-4b14-9544-e26bc3ea2b38" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.430819 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aee31d1e-9049-4b14-9544-e26bc3ea2b38-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "aee31d1e-9049-4b14-9544-e26bc3ea2b38" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.435290 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aee31d1e-9049-4b14-9544-e26bc3ea2b38-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "aee31d1e-9049-4b14-9544-e26bc3ea2b38" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.435818 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aee31d1e-9049-4b14-9544-e26bc3ea2b38-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "aee31d1e-9049-4b14-9544-e26bc3ea2b38" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.435805 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aee31d1e-9049-4b14-9544-e26bc3ea2b38-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "aee31d1e-9049-4b14-9544-e26bc3ea2b38" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.435980 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aee31d1e-9049-4b14-9544-e26bc3ea2b38-kube-api-access-h64pb" (OuterVolumeSpecName: "kube-api-access-h64pb") pod "aee31d1e-9049-4b14-9544-e26bc3ea2b38" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38"). InnerVolumeSpecName "kube-api-access-h64pb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.438379 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "aee31d1e-9049-4b14-9544-e26bc3ea2b38" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.446219 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aee31d1e-9049-4b14-9544-e26bc3ea2b38-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "aee31d1e-9049-4b14-9544-e26bc3ea2b38" (UID: "aee31d1e-9049-4b14-9544-e26bc3ea2b38"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.530485 4899 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/aee31d1e-9049-4b14-9544-e26bc3ea2b38-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.530522 4899 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/aee31d1e-9049-4b14-9544-e26bc3ea2b38-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.530534 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h64pb\" (UniqueName: \"kubernetes.io/projected/aee31d1e-9049-4b14-9544-e26bc3ea2b38-kube-api-access-h64pb\") on node \"crc\" DevicePath \"\"" Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.530615 4899 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/aee31d1e-9049-4b14-9544-e26bc3ea2b38-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.530632 4899 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/aee31d1e-9049-4b14-9544-e26bc3ea2b38-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.530641 4899 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/aee31d1e-9049-4b14-9544-e26bc3ea2b38-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.530653 4899 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/aee31d1e-9049-4b14-9544-e26bc3ea2b38-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.783084 4899 generic.go:334] "Generic (PLEG): container finished" podID="aee31d1e-9049-4b14-9544-e26bc3ea2b38" containerID="908a4b69aba1aa56fd8e0d2838bf125308a8a55589a0a3213076b22c27cbb5ed" exitCode=0 Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.783127 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.783126 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" event={"ID":"aee31d1e-9049-4b14-9544-e26bc3ea2b38","Type":"ContainerDied","Data":"908a4b69aba1aa56fd8e0d2838bf125308a8a55589a0a3213076b22c27cbb5ed"} Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.783318 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-l5vs7" event={"ID":"aee31d1e-9049-4b14-9544-e26bc3ea2b38","Type":"ContainerDied","Data":"c74ed861137a1aa2151497bfd322a678dc0efd6429f1b187cc553e8b5147beb8"} Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.783369 4899 scope.go:117] "RemoveContainer" containerID="908a4b69aba1aa56fd8e0d2838bf125308a8a55589a0a3213076b22c27cbb5ed" Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.797255 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l5vs7"] Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.802471 4899 scope.go:117] "RemoveContainer" containerID="908a4b69aba1aa56fd8e0d2838bf125308a8a55589a0a3213076b22c27cbb5ed" Oct 03 08:48:56 crc kubenswrapper[4899]: E1003 08:48:56.803045 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"908a4b69aba1aa56fd8e0d2838bf125308a8a55589a0a3213076b22c27cbb5ed\": container with ID starting with 908a4b69aba1aa56fd8e0d2838bf125308a8a55589a0a3213076b22c27cbb5ed not found: ID does not exist" containerID="908a4b69aba1aa56fd8e0d2838bf125308a8a55589a0a3213076b22c27cbb5ed" Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.803087 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"908a4b69aba1aa56fd8e0d2838bf125308a8a55589a0a3213076b22c27cbb5ed"} err="failed to get container status \"908a4b69aba1aa56fd8e0d2838bf125308a8a55589a0a3213076b22c27cbb5ed\": rpc error: code = NotFound desc = could not find container \"908a4b69aba1aa56fd8e0d2838bf125308a8a55589a0a3213076b22c27cbb5ed\": container with ID starting with 908a4b69aba1aa56fd8e0d2838bf125308a8a55589a0a3213076b22c27cbb5ed not found: ID does not exist" Oct 03 08:48:56 crc kubenswrapper[4899]: I1003 08:48:56.804689 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l5vs7"] Oct 03 08:48:58 crc kubenswrapper[4899]: I1003 08:48:58.532824 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aee31d1e-9049-4b14-9544-e26bc3ea2b38" path="/var/lib/kubelet/pods/aee31d1e-9049-4b14-9544-e26bc3ea2b38/volumes" Oct 03 08:49:12 crc kubenswrapper[4899]: I1003 08:49:12.198778 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 08:49:12 crc kubenswrapper[4899]: I1003 08:49:12.199818 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 08:49:42 crc 
kubenswrapper[4899]: I1003 08:49:42.198462 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 08:49:42 crc kubenswrapper[4899]: I1003 08:49:42.198980 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.458755 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-prrpr"] Oct 03 08:50:07 crc kubenswrapper[4899]: E1003 08:50:07.459558 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aee31d1e-9049-4b14-9544-e26bc3ea2b38" containerName="registry" Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.459575 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="aee31d1e-9049-4b14-9544-e26bc3ea2b38" containerName="registry" Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.459710 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="aee31d1e-9049-4b14-9544-e26bc3ea2b38" containerName="registry" Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.460254 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-prrpr" Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.465401 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.465612 4899 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-68dfk" Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.465799 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.473511 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-prrpr"] Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.493986 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-r2625"] Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.509564 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-r2625" Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.515742 4899 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-zc4r9" Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.521935 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-r2625"] Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.526642 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-jjw6p"] Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.527651 4899 util.go:30] "No sandbox for pod can be found. 
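[Editorial aside] The probe entries above (and the earlier ones at 08:49:12) are HTTP liveness probes against the machine-config-daemon health endpoint; "connection refused" means nothing is listening on 127.0.0.1:8798, and once enough consecutive probes fail the kubelet restarts the container, which is what eventually happens at 08:50:12 further down. A rough stand-alone approximation of the check; probeOnce and the threshold of 3 are assumptions for illustration, not taken from this pod's spec.

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probeOnce performs one HTTP liveness check. Any transport error (such as
// "connection refused") or a status outside 200-399 counts as a failure,
// roughly mirroring how kubelet HTTP probes are judged.
func probeOnce(url string, timeout time.Duration) error {
	client := &http.Client{Timeout: timeout}
	resp, err := client.Get(url)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unexpected status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	const url = "http://127.0.0.1:8798/health" // endpoint from the log above
	failures := 0
	for i := 0; i < 3; i++ { // failureThreshold-style counting
		if err := probeOnce(url, time.Second); err != nil {
			failures++
			fmt.Println("probe failed:", err)
		} else {
			failures = 0
		}
	}
	if failures >= 3 {
		fmt.Println("failure threshold reached: container would be restarted")
	}
}
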
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-jjw6p" Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.528621 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-jjw6p"] Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.529031 4899 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-wz4d5" Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.575534 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pwkt\" (UniqueName: \"kubernetes.io/projected/9892f98e-dee3-42ea-88c5-2a17dc19988d-kube-api-access-4pwkt\") pod \"cert-manager-5b446d88c5-r2625\" (UID: \"9892f98e-dee3-42ea-88c5-2a17dc19988d\") " pod="cert-manager/cert-manager-5b446d88c5-r2625" Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.575604 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qlfsd\" (UniqueName: \"kubernetes.io/projected/00a2b8ef-3f94-4a34-8692-6fd9fa800cd9-kube-api-access-qlfsd\") pod \"cert-manager-cainjector-7f985d654d-prrpr\" (UID: \"00a2b8ef-3f94-4a34-8692-6fd9fa800cd9\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-prrpr" Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.676517 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sv2vp\" (UniqueName: \"kubernetes.io/projected/355746c7-f59d-41d1-9cbe-c3668e16d478-kube-api-access-sv2vp\") pod \"cert-manager-webhook-5655c58dd6-jjw6p\" (UID: \"355746c7-f59d-41d1-9cbe-c3668e16d478\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-jjw6p" Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.676631 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qlfsd\" (UniqueName: \"kubernetes.io/projected/00a2b8ef-3f94-4a34-8692-6fd9fa800cd9-kube-api-access-qlfsd\") pod \"cert-manager-cainjector-7f985d654d-prrpr\" (UID: \"00a2b8ef-3f94-4a34-8692-6fd9fa800cd9\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-prrpr" Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.676715 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pwkt\" (UniqueName: \"kubernetes.io/projected/9892f98e-dee3-42ea-88c5-2a17dc19988d-kube-api-access-4pwkt\") pod \"cert-manager-5b446d88c5-r2625\" (UID: \"9892f98e-dee3-42ea-88c5-2a17dc19988d\") " pod="cert-manager/cert-manager-5b446d88c5-r2625" Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.696565 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qlfsd\" (UniqueName: \"kubernetes.io/projected/00a2b8ef-3f94-4a34-8692-6fd9fa800cd9-kube-api-access-qlfsd\") pod \"cert-manager-cainjector-7f985d654d-prrpr\" (UID: \"00a2b8ef-3f94-4a34-8692-6fd9fa800cd9\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-prrpr" Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.696811 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pwkt\" (UniqueName: \"kubernetes.io/projected/9892f98e-dee3-42ea-88c5-2a17dc19988d-kube-api-access-4pwkt\") pod \"cert-manager-5b446d88c5-r2625\" (UID: \"9892f98e-dee3-42ea-88c5-2a17dc19988d\") " pod="cert-manager/cert-manager-5b446d88c5-r2625" Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.777809 4899 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-sv2vp\" (UniqueName: \"kubernetes.io/projected/355746c7-f59d-41d1-9cbe-c3668e16d478-kube-api-access-sv2vp\") pod \"cert-manager-webhook-5655c58dd6-jjw6p\" (UID: \"355746c7-f59d-41d1-9cbe-c3668e16d478\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-jjw6p" Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.779630 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-prrpr" Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.795235 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sv2vp\" (UniqueName: \"kubernetes.io/projected/355746c7-f59d-41d1-9cbe-c3668e16d478-kube-api-access-sv2vp\") pod \"cert-manager-webhook-5655c58dd6-jjw6p\" (UID: \"355746c7-f59d-41d1-9cbe-c3668e16d478\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-jjw6p" Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.826527 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-r2625" Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.843813 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-jjw6p" Oct 03 08:50:07 crc kubenswrapper[4899]: I1003 08:50:07.997091 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-prrpr"] Oct 03 08:50:08 crc kubenswrapper[4899]: I1003 08:50:08.007259 4899 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 08:50:08 crc kubenswrapper[4899]: I1003 08:50:08.080774 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-jjw6p"] Oct 03 08:50:08 crc kubenswrapper[4899]: W1003 08:50:08.088996 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod355746c7_f59d_41d1_9cbe_c3668e16d478.slice/crio-d04baa18013a5136711259f1e12dc4d4ebbdbc929d0a561a982bebf105a14afb WatchSource:0}: Error finding container d04baa18013a5136711259f1e12dc4d4ebbdbc929d0a561a982bebf105a14afb: Status 404 returned error can't find the container with id d04baa18013a5136711259f1e12dc4d4ebbdbc929d0a561a982bebf105a14afb Oct 03 08:50:08 crc kubenswrapper[4899]: I1003 08:50:08.112996 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-r2625"] Oct 03 08:50:08 crc kubenswrapper[4899]: W1003 08:50:08.117384 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9892f98e_dee3_42ea_88c5_2a17dc19988d.slice/crio-92deca0d0a5b75d102011efcbe16acdc84e2dc2d7ca5f48fcaf7e09235d52c6f WatchSource:0}: Error finding container 92deca0d0a5b75d102011efcbe16acdc84e2dc2d7ca5f48fcaf7e09235d52c6f: Status 404 returned error can't find the container with id 92deca0d0a5b75d102011efcbe16acdc84e2dc2d7ca5f48fcaf7e09235d52c6f Oct 03 08:50:08 crc kubenswrapper[4899]: I1003 08:50:08.134926 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-r2625" event={"ID":"9892f98e-dee3-42ea-88c5-2a17dc19988d","Type":"ContainerStarted","Data":"92deca0d0a5b75d102011efcbe16acdc84e2dc2d7ca5f48fcaf7e09235d52c6f"} Oct 03 08:50:08 crc kubenswrapper[4899]: I1003 08:50:08.136141 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="cert-manager/cert-manager-cainjector-7f985d654d-prrpr" event={"ID":"00a2b8ef-3f94-4a34-8692-6fd9fa800cd9","Type":"ContainerStarted","Data":"6e177a70133021a52d9168c5d2b4b31a2f6ca8ed183e14933992b4295590a34c"} Oct 03 08:50:08 crc kubenswrapper[4899]: I1003 08:50:08.136979 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-jjw6p" event={"ID":"355746c7-f59d-41d1-9cbe-c3668e16d478","Type":"ContainerStarted","Data":"d04baa18013a5136711259f1e12dc4d4ebbdbc929d0a561a982bebf105a14afb"} Oct 03 08:50:12 crc kubenswrapper[4899]: I1003 08:50:12.158435 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-r2625" event={"ID":"9892f98e-dee3-42ea-88c5-2a17dc19988d","Type":"ContainerStarted","Data":"e3c3373d3ec72b6cbd015a2aa0c360d5ffbcbcd1ff991e789ff463c90e2822df"} Oct 03 08:50:12 crc kubenswrapper[4899]: I1003 08:50:12.160992 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-prrpr" event={"ID":"00a2b8ef-3f94-4a34-8692-6fd9fa800cd9","Type":"ContainerStarted","Data":"c5388c49c81e70694f4526be35259f5ae4c943b681a740304b9bd86a9d476ebe"} Oct 03 08:50:12 crc kubenswrapper[4899]: I1003 08:50:12.162450 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-jjw6p" event={"ID":"355746c7-f59d-41d1-9cbe-c3668e16d478","Type":"ContainerStarted","Data":"ffc83ba9f7ebc1dbcc2004148ed4990c55bc10c6fb38be1c67cf3aaac8b5900c"} Oct 03 08:50:12 crc kubenswrapper[4899]: I1003 08:50:12.162610 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-jjw6p" Oct 03 08:50:12 crc kubenswrapper[4899]: I1003 08:50:12.184366 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-r2625" podStartSLOduration=2.144225385 podStartE2EDuration="5.184348439s" podCreationTimestamp="2025-10-03 08:50:07 +0000 UTC" firstStartedPulling="2025-10-03 08:50:08.119790773 +0000 UTC m=+582.227275726" lastFinishedPulling="2025-10-03 08:50:11.159913837 +0000 UTC m=+585.267398780" observedRunningTime="2025-10-03 08:50:12.172505555 +0000 UTC m=+586.279990508" watchObservedRunningTime="2025-10-03 08:50:12.184348439 +0000 UTC m=+586.291833392" Oct 03 08:50:12 crc kubenswrapper[4899]: I1003 08:50:12.193566 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-jjw6p" podStartSLOduration=2.197365768 podStartE2EDuration="5.193550909s" podCreationTimestamp="2025-10-03 08:50:07 +0000 UTC" firstStartedPulling="2025-10-03 08:50:08.09428588 +0000 UTC m=+582.201770833" lastFinishedPulling="2025-10-03 08:50:11.090471021 +0000 UTC m=+585.197955974" observedRunningTime="2025-10-03 08:50:12.192452365 +0000 UTC m=+586.299937318" watchObservedRunningTime="2025-10-03 08:50:12.193550909 +0000 UTC m=+586.301035862" Oct 03 08:50:12 crc kubenswrapper[4899]: I1003 08:50:12.198751 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 08:50:12 crc kubenswrapper[4899]: I1003 08:50:12.198797 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 08:50:12 crc kubenswrapper[4899]: I1003 08:50:12.198877 4899 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 08:50:12 crc kubenswrapper[4899]: I1003 08:50:12.199396 4899 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6317475fe02e5878e0a0ad7c510b598f3249a496df5a7826416b0c9a1c2435dc"} pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 08:50:12 crc kubenswrapper[4899]: I1003 08:50:12.199449 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" containerID="cri-o://6317475fe02e5878e0a0ad7c510b598f3249a496df5a7826416b0c9a1c2435dc" gracePeriod=600 Oct 03 08:50:12 crc kubenswrapper[4899]: E1003 08:50:12.282535 4899 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3e8a7198_81da_475c_ac88_a460ba4064d1.slice/crio-6317475fe02e5878e0a0ad7c510b598f3249a496df5a7826416b0c9a1c2435dc.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3e8a7198_81da_475c_ac88_a460ba4064d1.slice/crio-conmon-6317475fe02e5878e0a0ad7c510b598f3249a496df5a7826416b0c9a1c2435dc.scope\": RecentStats: unable to find data in memory cache]" Oct 03 08:50:13 crc kubenswrapper[4899]: I1003 08:50:13.169913 4899 generic.go:334] "Generic (PLEG): container finished" podID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerID="6317475fe02e5878e0a0ad7c510b598f3249a496df5a7826416b0c9a1c2435dc" exitCode=0 Oct 03 08:50:13 crc kubenswrapper[4899]: I1003 08:50:13.169936 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerDied","Data":"6317475fe02e5878e0a0ad7c510b598f3249a496df5a7826416b0c9a1c2435dc"} Oct 03 08:50:13 crc kubenswrapper[4899]: I1003 08:50:13.171510 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerStarted","Data":"0b65c263cdff33924e77f87b039bd41bb8cbe3269904b715ae19db8a65d88bf8"} Oct 03 08:50:13 crc kubenswrapper[4899]: I1003 08:50:13.171544 4899 scope.go:117] "RemoveContainer" containerID="c575796f37c9e9cc7bfd1aa952849ac3387d60b1b8fe207a0308043edd984a52" Oct 03 08:50:13 crc kubenswrapper[4899]: I1003 08:50:13.187378 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-prrpr" podStartSLOduration=3.104039387 podStartE2EDuration="6.187361082s" podCreationTimestamp="2025-10-03 08:50:07 +0000 UTC" firstStartedPulling="2025-10-03 08:50:08.007062533 +0000 UTC m=+582.114547486" lastFinishedPulling="2025-10-03 08:50:11.090384228 +0000 UTC m=+585.197869181" observedRunningTime="2025-10-03 08:50:12.206424965 +0000 UTC m=+586.313909918" watchObservedRunningTime="2025-10-03 
08:50:13.187361082 +0000 UTC m=+587.294846035" Oct 03 08:50:17 crc kubenswrapper[4899]: I1003 08:50:17.668324 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-g7f7c"] Oct 03 08:50:17 crc kubenswrapper[4899]: I1003 08:50:17.669100 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="ovn-controller" containerID="cri-o://e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e" gracePeriod=30 Oct 03 08:50:17 crc kubenswrapper[4899]: I1003 08:50:17.669135 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6" gracePeriod=30 Oct 03 08:50:17 crc kubenswrapper[4899]: I1003 08:50:17.669145 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="nbdb" containerID="cri-o://3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b" gracePeriod=30 Oct 03 08:50:17 crc kubenswrapper[4899]: I1003 08:50:17.669202 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="ovn-acl-logging" containerID="cri-o://8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68" gracePeriod=30 Oct 03 08:50:17 crc kubenswrapper[4899]: I1003 08:50:17.669202 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="northd" containerID="cri-o://907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad" gracePeriod=30 Oct 03 08:50:17 crc kubenswrapper[4899]: I1003 08:50:17.669272 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="sbdb" containerID="cri-o://f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e" gracePeriod=30 Oct 03 08:50:17 crc kubenswrapper[4899]: I1003 08:50:17.669202 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="kube-rbac-proxy-node" containerID="cri-o://36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72" gracePeriod=30 Oct 03 08:50:17 crc kubenswrapper[4899]: I1003 08:50:17.708298 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="ovnkube-controller" containerID="cri-o://e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331" gracePeriod=30 Oct 03 08:50:17 crc kubenswrapper[4899]: I1003 08:50:17.847028 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-jjw6p" Oct 03 08:50:17 crc kubenswrapper[4899]: I1003 08:50:17.999511 4899 log.go:25] "Finished parsing log file" 
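[Editorial aside] Deleting the ovnkube-node pod produces a "Killing container with a grace period" entry for every one of its containers within a fraction of a second, which suggests the stop requests are issued concurrently rather than one after another. A goroutine-per-container sketch of that fan-out; stopContainer and the hard-coded names are illustrative only, not the kubelet's code.

package main

import (
	"fmt"
	"sync"
	"time"
)

// stopContainer stands in for "send SIGTERM, wait up to grace, then SIGKILL".
func stopContainer(name string, grace time.Duration) {
	fmt.Printf("killing %s with grace period %s\n", name, grace)
	time.Sleep(10 * time.Millisecond) // pretend the runtime call takes a moment
}

func main() {
	containers := []string{
		"ovn-controller", "kube-rbac-proxy-ovn-metrics", "nbdb",
		"ovn-acl-logging", "northd", "sbdb", "kube-rbac-proxy-node",
		"ovnkube-controller",
	}
	var wg sync.WaitGroup
	for _, c := range containers {
		wg.Add(1)
		go func(name string) { // one stop per container, all in flight at once
			defer wg.Done()
			stopContainer(name, 30*time.Second)
		}(c)
	}
	wg.Wait()
	fmt.Println("all containers stopped; sandbox can be torn down")
}
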
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f7c_764a7341-6f52-4fc1-9086-87b90aa126e8/ovnkube-controller/3.log" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.002098 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f7c_764a7341-6f52-4fc1-9086-87b90aa126e8/ovn-acl-logging/0.log" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.002825 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f7c_764a7341-6f52-4fc1-9086-87b90aa126e8/ovn-controller/0.log" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.003295 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.054355 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-ndqm8"] Oct 03 08:50:18 crc kubenswrapper[4899]: E1003 08:50:18.054555 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="kube-rbac-proxy-ovn-metrics" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.054565 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="kube-rbac-proxy-ovn-metrics" Oct 03 08:50:18 crc kubenswrapper[4899]: E1003 08:50:18.054574 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="kubecfg-setup" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.054581 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="kubecfg-setup" Oct 03 08:50:18 crc kubenswrapper[4899]: E1003 08:50:18.054590 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="northd" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.054595 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="northd" Oct 03 08:50:18 crc kubenswrapper[4899]: E1003 08:50:18.054604 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="ovnkube-controller" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.054610 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="ovnkube-controller" Oct 03 08:50:18 crc kubenswrapper[4899]: E1003 08:50:18.054619 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="ovn-acl-logging" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.054624 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="ovn-acl-logging" Oct 03 08:50:18 crc kubenswrapper[4899]: E1003 08:50:18.054632 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="nbdb" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.054637 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="nbdb" Oct 03 08:50:18 crc kubenswrapper[4899]: E1003 08:50:18.054648 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="ovnkube-controller" Oct 03 08:50:18 crc 
kubenswrapper[4899]: I1003 08:50:18.054655 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="ovnkube-controller" Oct 03 08:50:18 crc kubenswrapper[4899]: E1003 08:50:18.054664 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="kube-rbac-proxy-node" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.054670 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="kube-rbac-proxy-node" Oct 03 08:50:18 crc kubenswrapper[4899]: E1003 08:50:18.054680 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="ovnkube-controller" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.054685 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="ovnkube-controller" Oct 03 08:50:18 crc kubenswrapper[4899]: E1003 08:50:18.054690 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="ovnkube-controller" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.054696 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="ovnkube-controller" Oct 03 08:50:18 crc kubenswrapper[4899]: E1003 08:50:18.054704 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="sbdb" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.054709 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="sbdb" Oct 03 08:50:18 crc kubenswrapper[4899]: E1003 08:50:18.054719 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="ovn-controller" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.054724 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="ovn-controller" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.054816 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="kube-rbac-proxy-ovn-metrics" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.054823 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="northd" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.054832 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="ovnkube-controller" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.054840 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="nbdb" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.054847 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="ovnkube-controller" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.054854 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="ovnkube-controller" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.054861 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" 
containerName="ovnkube-controller" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.054867 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="ovn-acl-logging" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.054873 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="kube-rbac-proxy-node" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.054879 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="sbdb" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.054901 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="ovn-controller" Oct 03 08:50:18 crc kubenswrapper[4899]: E1003 08:50:18.054988 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="ovnkube-controller" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.054994 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="ovnkube-controller" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.055067 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerName="ovnkube-controller" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.056963 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.109046 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-run-ovn\") pod \"764a7341-6f52-4fc1-9086-87b90aa126e8\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.109091 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-log-socket\") pod \"764a7341-6f52-4fc1-9086-87b90aa126e8\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.109128 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-run-openvswitch\") pod \"764a7341-6f52-4fc1-9086-87b90aa126e8\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.109150 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-run-systemd\") pod \"764a7341-6f52-4fc1-9086-87b90aa126e8\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.109166 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-cni-netd\") pod \"764a7341-6f52-4fc1-9086-87b90aa126e8\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.109194 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" 
(UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-systemd-units\") pod \"764a7341-6f52-4fc1-9086-87b90aa126e8\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.109243 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/764a7341-6f52-4fc1-9086-87b90aa126e8-ovnkube-script-lib\") pod \"764a7341-6f52-4fc1-9086-87b90aa126e8\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.109257 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-kubelet\") pod \"764a7341-6f52-4fc1-9086-87b90aa126e8\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.109280 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-var-lib-cni-networks-ovn-kubernetes\") pod \"764a7341-6f52-4fc1-9086-87b90aa126e8\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.109295 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-slash\") pod \"764a7341-6f52-4fc1-9086-87b90aa126e8\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.109312 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-run-netns\") pod \"764a7341-6f52-4fc1-9086-87b90aa126e8\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.109328 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/764a7341-6f52-4fc1-9086-87b90aa126e8-env-overrides\") pod \"764a7341-6f52-4fc1-9086-87b90aa126e8\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.109342 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-node-log\") pod \"764a7341-6f52-4fc1-9086-87b90aa126e8\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.109356 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-var-lib-openvswitch\") pod \"764a7341-6f52-4fc1-9086-87b90aa126e8\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.109368 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-etc-openvswitch\") pod \"764a7341-6f52-4fc1-9086-87b90aa126e8\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.109387 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-cni-bin\") pod \"764a7341-6f52-4fc1-9086-87b90aa126e8\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.109403 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/764a7341-6f52-4fc1-9086-87b90aa126e8-ovn-node-metrics-cert\") pod \"764a7341-6f52-4fc1-9086-87b90aa126e8\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.109426 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/764a7341-6f52-4fc1-9086-87b90aa126e8-ovnkube-config\") pod \"764a7341-6f52-4fc1-9086-87b90aa126e8\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.109443 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s7fgv\" (UniqueName: \"kubernetes.io/projected/764a7341-6f52-4fc1-9086-87b90aa126e8-kube-api-access-s7fgv\") pod \"764a7341-6f52-4fc1-9086-87b90aa126e8\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.109462 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-run-ovn-kubernetes\") pod \"764a7341-6f52-4fc1-9086-87b90aa126e8\" (UID: \"764a7341-6f52-4fc1-9086-87b90aa126e8\") " Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.109656 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "764a7341-6f52-4fc1-9086-87b90aa126e8" (UID: "764a7341-6f52-4fc1-9086-87b90aa126e8"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.109688 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "764a7341-6f52-4fc1-9086-87b90aa126e8" (UID: "764a7341-6f52-4fc1-9086-87b90aa126e8"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.109726 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-log-socket" (OuterVolumeSpecName: "log-socket") pod "764a7341-6f52-4fc1-9086-87b90aa126e8" (UID: "764a7341-6f52-4fc1-9086-87b90aa126e8"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.109741 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "764a7341-6f52-4fc1-9086-87b90aa126e8" (UID: "764a7341-6f52-4fc1-9086-87b90aa126e8"). InnerVolumeSpecName "run-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.110034 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-node-log" (OuterVolumeSpecName: "node-log") pod "764a7341-6f52-4fc1-9086-87b90aa126e8" (UID: "764a7341-6f52-4fc1-9086-87b90aa126e8"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.110054 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "764a7341-6f52-4fc1-9086-87b90aa126e8" (UID: "764a7341-6f52-4fc1-9086-87b90aa126e8"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.110097 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "764a7341-6f52-4fc1-9086-87b90aa126e8" (UID: "764a7341-6f52-4fc1-9086-87b90aa126e8"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.110072 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "764a7341-6f52-4fc1-9086-87b90aa126e8" (UID: "764a7341-6f52-4fc1-9086-87b90aa126e8"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.110085 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "764a7341-6f52-4fc1-9086-87b90aa126e8" (UID: "764a7341-6f52-4fc1-9086-87b90aa126e8"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.110140 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "764a7341-6f52-4fc1-9086-87b90aa126e8" (UID: "764a7341-6f52-4fc1-9086-87b90aa126e8"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.110144 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "764a7341-6f52-4fc1-9086-87b90aa126e8" (UID: "764a7341-6f52-4fc1-9086-87b90aa126e8"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.110236 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-slash" (OuterVolumeSpecName: "host-slash") pod "764a7341-6f52-4fc1-9086-87b90aa126e8" (UID: "764a7341-6f52-4fc1-9086-87b90aa126e8"). InnerVolumeSpecName "host-slash". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.110246 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "764a7341-6f52-4fc1-9086-87b90aa126e8" (UID: "764a7341-6f52-4fc1-9086-87b90aa126e8"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.110328 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/764a7341-6f52-4fc1-9086-87b90aa126e8-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "764a7341-6f52-4fc1-9086-87b90aa126e8" (UID: "764a7341-6f52-4fc1-9086-87b90aa126e8"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.110415 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/764a7341-6f52-4fc1-9086-87b90aa126e8-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "764a7341-6f52-4fc1-9086-87b90aa126e8" (UID: "764a7341-6f52-4fc1-9086-87b90aa126e8"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.110453 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "764a7341-6f52-4fc1-9086-87b90aa126e8" (UID: "764a7341-6f52-4fc1-9086-87b90aa126e8"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.110469 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/764a7341-6f52-4fc1-9086-87b90aa126e8-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "764a7341-6f52-4fc1-9086-87b90aa126e8" (UID: "764a7341-6f52-4fc1-9086-87b90aa126e8"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.115378 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/764a7341-6f52-4fc1-9086-87b90aa126e8-kube-api-access-s7fgv" (OuterVolumeSpecName: "kube-api-access-s7fgv") pod "764a7341-6f52-4fc1-9086-87b90aa126e8" (UID: "764a7341-6f52-4fc1-9086-87b90aa126e8"). InnerVolumeSpecName "kube-api-access-s7fgv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.115564 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/764a7341-6f52-4fc1-9086-87b90aa126e8-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "764a7341-6f52-4fc1-9086-87b90aa126e8" (UID: "764a7341-6f52-4fc1-9086-87b90aa126e8"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.124866 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "764a7341-6f52-4fc1-9086-87b90aa126e8" (UID: "764a7341-6f52-4fc1-9086-87b90aa126e8"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.197916 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f7c_764a7341-6f52-4fc1-9086-87b90aa126e8/ovnkube-controller/3.log" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.200105 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f7c_764a7341-6f52-4fc1-9086-87b90aa126e8/ovn-acl-logging/0.log" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.200535 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f7c_764a7341-6f52-4fc1-9086-87b90aa126e8/ovn-controller/0.log" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.200912 4899 generic.go:334] "Generic (PLEG): container finished" podID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerID="e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331" exitCode=0 Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.200949 4899 generic.go:334] "Generic (PLEG): container finished" podID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerID="f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e" exitCode=0 Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.200959 4899 generic.go:334] "Generic (PLEG): container finished" podID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerID="3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b" exitCode=0 Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.200969 4899 generic.go:334] "Generic (PLEG): container finished" podID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerID="907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad" exitCode=0 Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.200978 4899 generic.go:334] "Generic (PLEG): container finished" podID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerID="265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6" exitCode=0 Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.200985 4899 generic.go:334] "Generic (PLEG): container finished" podID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerID="36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72" exitCode=0 Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.200992 4899 generic.go:334] "Generic (PLEG): container finished" podID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerID="8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68" exitCode=143 Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.201001 4899 generic.go:334] "Generic (PLEG): container finished" podID="764a7341-6f52-4fc1-9086-87b90aa126e8" containerID="e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e" exitCode=143 Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.201135 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.201449 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" event={"ID":"764a7341-6f52-4fc1-9086-87b90aa126e8","Type":"ContainerDied","Data":"e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.201494 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" event={"ID":"764a7341-6f52-4fc1-9086-87b90aa126e8","Type":"ContainerDied","Data":"f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.201510 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" event={"ID":"764a7341-6f52-4fc1-9086-87b90aa126e8","Type":"ContainerDied","Data":"3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.201522 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" event={"ID":"764a7341-6f52-4fc1-9086-87b90aa126e8","Type":"ContainerDied","Data":"907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.201532 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" event={"ID":"764a7341-6f52-4fc1-9086-87b90aa126e8","Type":"ContainerDied","Data":"265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.201541 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" event={"ID":"764a7341-6f52-4fc1-9086-87b90aa126e8","Type":"ContainerDied","Data":"36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.201552 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.201563 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.201571 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.201578 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.201585 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.201592 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.201598 4899 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.201605 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.201750 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.201595 4899 scope.go:117] "RemoveContainer" containerID="e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.201953 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" event={"ID":"764a7341-6f52-4fc1-9086-87b90aa126e8","Type":"ContainerDied","Data":"8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.201981 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.201991 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.201998 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202004 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202011 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202017 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202024 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202032 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202039 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202044 4899 pod_container_deletor.go:114] "Failed to issue the request to remove 
container" containerID={"Type":"cri-o","ID":"965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202052 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" event={"ID":"764a7341-6f52-4fc1-9086-87b90aa126e8","Type":"ContainerDied","Data":"e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202060 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202066 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202095 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202101 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202106 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202112 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202117 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202122 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202127 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202132 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202140 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f7c" event={"ID":"764a7341-6f52-4fc1-9086-87b90aa126e8","Type":"ContainerDied","Data":"adf5c7a5ad44609f6b8e85e4025e2c27f82e46e26d5a3c6f5f840e5339652b80"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202149 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202157 4899 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202163 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202170 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202177 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202183 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202191 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202198 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202220 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.202227 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.203222 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pgdhq_6f75d8d8-3b12-42bf-b447-0afb4413fd54/kube-multus/2.log" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.203607 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pgdhq_6f75d8d8-3b12-42bf-b447-0afb4413fd54/kube-multus/1.log" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.203644 4899 generic.go:334] "Generic (PLEG): container finished" podID="6f75d8d8-3b12-42bf-b447-0afb4413fd54" containerID="46a6450698daebd9728ad0b6a8bde26f5c4b700e695dc1dc1f7d33663c96b564" exitCode=2 Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.203668 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pgdhq" event={"ID":"6f75d8d8-3b12-42bf-b447-0afb4413fd54","Type":"ContainerDied","Data":"46a6450698daebd9728ad0b6a8bde26f5c4b700e695dc1dc1f7d33663c96b564"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.203683 4899 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b2f7940cc405b005b23f102def919753d7820ecc4e45db9f25cb87611611f4dc"} Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.204161 4899 scope.go:117] "RemoveContainer" 
containerID="46a6450698daebd9728ad0b6a8bde26f5c4b700e695dc1dc1f7d33663c96b564" Oct 03 08:50:18 crc kubenswrapper[4899]: E1003 08:50:18.204412 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-pgdhq_openshift-multus(6f75d8d8-3b12-42bf-b447-0afb4413fd54)\"" pod="openshift-multus/multus-pgdhq" podUID="6f75d8d8-3b12-42bf-b447-0afb4413fd54" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.210344 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-host-cni-bin\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.210380 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-host-slash\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.210407 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f53c8573-e9dc-40a7-9ef8-215a7813940b-ovnkube-script-lib\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.210433 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-host-run-ovn-kubernetes\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.210475 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-etc-openvswitch\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.210525 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-run-openvswitch\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.210542 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-run-ovn\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.210557 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/f53c8573-e9dc-40a7-9ef8-215a7813940b-ovnkube-config\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.210579 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dq76k\" (UniqueName: \"kubernetes.io/projected/f53c8573-e9dc-40a7-9ef8-215a7813940b-kube-api-access-dq76k\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.210649 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-host-cni-netd\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.210821 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-host-kubelet\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.210862 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-var-lib-openvswitch\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.210878 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f53c8573-e9dc-40a7-9ef8-215a7813940b-env-overrides\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.210910 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-systemd-units\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.210928 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-run-systemd\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.210946 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-node-log\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.210961 4899 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f53c8573-e9dc-40a7-9ef8-215a7813940b-ovn-node-metrics-cert\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.210980 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-log-socket\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.211069 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.211094 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-host-run-netns\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.211159 4899 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.211172 4899 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.211182 4899 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-log-socket\") on node \"crc\" DevicePath \"\"" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.211191 4899 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-run-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.211202 4899 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-run-systemd\") on node \"crc\" DevicePath \"\"" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.211214 4899 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-cni-netd\") on node \"crc\" DevicePath \"\"" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.211226 4899 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-systemd-units\") on node \"crc\" DevicePath \"\"" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.211239 4899 reconciler_common.go:293] "Volume detached for 
volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/764a7341-6f52-4fc1-9086-87b90aa126e8-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.211246 4899 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-kubelet\") on node \"crc\" DevicePath \"\"" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.211254 4899 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.211263 4899 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-slash\") on node \"crc\" DevicePath \"\"" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.211270 4899 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-run-netns\") on node \"crc\" DevicePath \"\"" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.211280 4899 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/764a7341-6f52-4fc1-9086-87b90aa126e8-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.211289 4899 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-node-log\") on node \"crc\" DevicePath \"\"" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.211300 4899 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.211310 4899 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.211320 4899 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/764a7341-6f52-4fc1-9086-87b90aa126e8-host-cni-bin\") on node \"crc\" DevicePath \"\"" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.211331 4899 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/764a7341-6f52-4fc1-9086-87b90aa126e8-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.211343 4899 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/764a7341-6f52-4fc1-9086-87b90aa126e8-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.211354 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s7fgv\" (UniqueName: \"kubernetes.io/projected/764a7341-6f52-4fc1-9086-87b90aa126e8-kube-api-access-s7fgv\") on node \"crc\" DevicePath \"\"" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.225129 4899 scope.go:117] "RemoveContainer" 
containerID="4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.238828 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-g7f7c"] Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.241369 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-g7f7c"] Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.254947 4899 scope.go:117] "RemoveContainer" containerID="f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.267520 4899 scope.go:117] "RemoveContainer" containerID="3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.280686 4899 scope.go:117] "RemoveContainer" containerID="907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.292203 4899 scope.go:117] "RemoveContainer" containerID="265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.307016 4899 scope.go:117] "RemoveContainer" containerID="36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.312300 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-etc-openvswitch\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.312348 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-run-openvswitch\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.312365 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-run-ovn\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.312383 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f53c8573-e9dc-40a7-9ef8-215a7813940b-ovnkube-config\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.312405 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dq76k\" (UniqueName: \"kubernetes.io/projected/f53c8573-e9dc-40a7-9ef8-215a7813940b-kube-api-access-dq76k\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.312422 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-host-cni-netd\") pod \"ovnkube-node-ndqm8\" (UID: 
\"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.312437 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-etc-openvswitch\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.312447 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-run-openvswitch\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.312444 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-host-kubelet\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.312497 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-host-cni-netd\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.312506 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f53c8573-e9dc-40a7-9ef8-215a7813940b-env-overrides\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.312526 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-var-lib-openvswitch\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.312546 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-run-systemd\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.312537 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-run-ovn\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.312579 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-systemd-units\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 
08:50:18.312561 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-systemd-units\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.312692 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-var-lib-openvswitch\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.312749 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-run-systemd\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.312787 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-host-kubelet\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.312793 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-node-log\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.312756 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-node-log\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.312856 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f53c8573-e9dc-40a7-9ef8-215a7813940b-ovn-node-metrics-cert\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.312876 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-log-socket\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.312926 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.312951 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-host-run-netns\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.312970 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-host-cni-bin\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.312988 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-host-slash\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.313014 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f53c8573-e9dc-40a7-9ef8-215a7813940b-ovnkube-script-lib\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.313039 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-host-run-ovn-kubernetes\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.313315 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f53c8573-e9dc-40a7-9ef8-215a7813940b-env-overrides\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.313369 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-host-cni-bin\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.313396 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-host-slash\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.313426 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f53c8573-e9dc-40a7-9ef8-215a7813940b-ovnkube-config\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.313465 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.313520 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-log-socket\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.313594 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-host-run-ovn-kubernetes\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.313629 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f53c8573-e9dc-40a7-9ef8-215a7813940b-host-run-netns\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.313966 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f53c8573-e9dc-40a7-9ef8-215a7813940b-ovnkube-script-lib\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.321091 4899 scope.go:117] "RemoveContainer" containerID="8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.321331 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f53c8573-e9dc-40a7-9ef8-215a7813940b-ovn-node-metrics-cert\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.327302 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dq76k\" (UniqueName: \"kubernetes.io/projected/f53c8573-e9dc-40a7-9ef8-215a7813940b-kube-api-access-dq76k\") pod \"ovnkube-node-ndqm8\" (UID: \"f53c8573-e9dc-40a7-9ef8-215a7813940b\") " pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.333205 4899 scope.go:117] "RemoveContainer" containerID="e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.345966 4899 scope.go:117] "RemoveContainer" containerID="965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.357827 4899 scope.go:117] "RemoveContainer" containerID="e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331" Oct 03 08:50:18 crc kubenswrapper[4899]: E1003 08:50:18.358689 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331\": container with ID starting with 
e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331 not found: ID does not exist" containerID="e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.358742 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331"} err="failed to get container status \"e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331\": rpc error: code = NotFound desc = could not find container \"e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331\": container with ID starting with e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331 not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.358779 4899 scope.go:117] "RemoveContainer" containerID="4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee" Oct 03 08:50:18 crc kubenswrapper[4899]: E1003 08:50:18.359320 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee\": container with ID starting with 4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee not found: ID does not exist" containerID="4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.359353 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee"} err="failed to get container status \"4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee\": rpc error: code = NotFound desc = could not find container \"4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee\": container with ID starting with 4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.359367 4899 scope.go:117] "RemoveContainer" containerID="f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e" Oct 03 08:50:18 crc kubenswrapper[4899]: E1003 08:50:18.359826 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\": container with ID starting with f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e not found: ID does not exist" containerID="f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.359850 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e"} err="failed to get container status \"f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\": rpc error: code = NotFound desc = could not find container \"f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\": container with ID starting with f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.359865 4899 scope.go:117] "RemoveContainer" containerID="3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b" Oct 03 08:50:18 crc kubenswrapper[4899]: E1003 08:50:18.360198 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\": container with ID starting with 3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b not found: ID does not exist" containerID="3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.360226 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b"} err="failed to get container status \"3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\": rpc error: code = NotFound desc = could not find container \"3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\": container with ID starting with 3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.360242 4899 scope.go:117] "RemoveContainer" containerID="907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad" Oct 03 08:50:18 crc kubenswrapper[4899]: E1003 08:50:18.361253 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\": container with ID starting with 907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad not found: ID does not exist" containerID="907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.361278 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad"} err="failed to get container status \"907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\": rpc error: code = NotFound desc = could not find container \"907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\": container with ID starting with 907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.361291 4899 scope.go:117] "RemoveContainer" containerID="265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6" Oct 03 08:50:18 crc kubenswrapper[4899]: E1003 08:50:18.364652 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\": container with ID starting with 265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6 not found: ID does not exist" containerID="265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.364692 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6"} err="failed to get container status \"265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\": rpc error: code = NotFound desc = could not find container \"265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\": container with ID starting with 265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6 not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.364713 4899 scope.go:117] "RemoveContainer" 
containerID="36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72" Oct 03 08:50:18 crc kubenswrapper[4899]: E1003 08:50:18.365436 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\": container with ID starting with 36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72 not found: ID does not exist" containerID="36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.365511 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72"} err="failed to get container status \"36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\": rpc error: code = NotFound desc = could not find container \"36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\": container with ID starting with 36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72 not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.365576 4899 scope.go:117] "RemoveContainer" containerID="8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68" Oct 03 08:50:18 crc kubenswrapper[4899]: E1003 08:50:18.366209 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\": container with ID starting with 8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68 not found: ID does not exist" containerID="8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.366239 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68"} err="failed to get container status \"8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\": rpc error: code = NotFound desc = could not find container \"8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\": container with ID starting with 8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68 not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.366261 4899 scope.go:117] "RemoveContainer" containerID="e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e" Oct 03 08:50:18 crc kubenswrapper[4899]: E1003 08:50:18.366707 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\": container with ID starting with e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e not found: ID does not exist" containerID="e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.366726 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e"} err="failed to get container status \"e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\": rpc error: code = NotFound desc = could not find container \"e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\": container with ID starting with 
e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.366741 4899 scope.go:117] "RemoveContainer" containerID="965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606" Oct 03 08:50:18 crc kubenswrapper[4899]: E1003 08:50:18.366968 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\": container with ID starting with 965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606 not found: ID does not exist" containerID="965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.366988 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606"} err="failed to get container status \"965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\": rpc error: code = NotFound desc = could not find container \"965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\": container with ID starting with 965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606 not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.367000 4899 scope.go:117] "RemoveContainer" containerID="e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.367658 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331"} err="failed to get container status \"e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331\": rpc error: code = NotFound desc = could not find container \"e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331\": container with ID starting with e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331 not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.367675 4899 scope.go:117] "RemoveContainer" containerID="4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.368204 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee"} err="failed to get container status \"4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee\": rpc error: code = NotFound desc = could not find container \"4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee\": container with ID starting with 4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.368224 4899 scope.go:117] "RemoveContainer" containerID="f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.368571 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e"} err="failed to get container status \"f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\": rpc error: code = NotFound desc = could not find container \"f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\": container with ID starting with 
f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.368605 4899 scope.go:117] "RemoveContainer" containerID="3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.368947 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b"} err="failed to get container status \"3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\": rpc error: code = NotFound desc = could not find container \"3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\": container with ID starting with 3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.368968 4899 scope.go:117] "RemoveContainer" containerID="907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.369193 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.369190 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad"} err="failed to get container status \"907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\": rpc error: code = NotFound desc = could not find container \"907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\": container with ID starting with 907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.369294 4899 scope.go:117] "RemoveContainer" containerID="265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.370573 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6"} err="failed to get container status \"265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\": rpc error: code = NotFound desc = could not find container \"265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\": container with ID starting with 265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6 not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.370608 4899 scope.go:117] "RemoveContainer" containerID="36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.371130 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72"} err="failed to get container status \"36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\": rpc error: code = NotFound desc = could not find container \"36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\": container with ID starting with 36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72 not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.371155 4899 scope.go:117] "RemoveContainer" containerID="8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68" Oct 
03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.371378 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68"} err="failed to get container status \"8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\": rpc error: code = NotFound desc = could not find container \"8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\": container with ID starting with 8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68 not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.371395 4899 scope.go:117] "RemoveContainer" containerID="e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.371645 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e"} err="failed to get container status \"e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\": rpc error: code = NotFound desc = could not find container \"e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\": container with ID starting with e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.371663 4899 scope.go:117] "RemoveContainer" containerID="965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.371915 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606"} err="failed to get container status \"965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\": rpc error: code = NotFound desc = could not find container \"965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\": container with ID starting with 965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606 not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.371938 4899 scope.go:117] "RemoveContainer" containerID="e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.372203 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331"} err="failed to get container status \"e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331\": rpc error: code = NotFound desc = could not find container \"e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331\": container with ID starting with e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331 not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.372231 4899 scope.go:117] "RemoveContainer" containerID="4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.372491 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee"} err="failed to get container status \"4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee\": rpc error: code = NotFound desc = could not find container \"4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee\": 
container with ID starting with 4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.372513 4899 scope.go:117] "RemoveContainer" containerID="f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.372942 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e"} err="failed to get container status \"f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\": rpc error: code = NotFound desc = could not find container \"f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\": container with ID starting with f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.372962 4899 scope.go:117] "RemoveContainer" containerID="3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.373208 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b"} err="failed to get container status \"3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\": rpc error: code = NotFound desc = could not find container \"3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\": container with ID starting with 3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.373224 4899 scope.go:117] "RemoveContainer" containerID="907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.373454 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad"} err="failed to get container status \"907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\": rpc error: code = NotFound desc = could not find container \"907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\": container with ID starting with 907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.373493 4899 scope.go:117] "RemoveContainer" containerID="265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.373739 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6"} err="failed to get container status \"265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\": rpc error: code = NotFound desc = could not find container \"265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\": container with ID starting with 265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6 not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.373761 4899 scope.go:117] "RemoveContainer" containerID="36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.374024 4899 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72"} err="failed to get container status \"36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\": rpc error: code = NotFound desc = could not find container \"36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\": container with ID starting with 36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72 not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.374046 4899 scope.go:117] "RemoveContainer" containerID="8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.374258 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68"} err="failed to get container status \"8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\": rpc error: code = NotFound desc = could not find container \"8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\": container with ID starting with 8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68 not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.374275 4899 scope.go:117] "RemoveContainer" containerID="e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.374474 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e"} err="failed to get container status \"e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\": rpc error: code = NotFound desc = could not find container \"e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\": container with ID starting with e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.374488 4899 scope.go:117] "RemoveContainer" containerID="965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.374644 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606"} err="failed to get container status \"965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\": rpc error: code = NotFound desc = could not find container \"965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\": container with ID starting with 965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606 not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.374663 4899 scope.go:117] "RemoveContainer" containerID="e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.374866 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331"} err="failed to get container status \"e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331\": rpc error: code = NotFound desc = could not find container \"e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331\": container with ID starting with e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331 not found: ID does not exist" Oct 
03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.374883 4899 scope.go:117] "RemoveContainer" containerID="4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.375114 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee"} err="failed to get container status \"4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee\": rpc error: code = NotFound desc = could not find container \"4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee\": container with ID starting with 4902f94e75c5598a2e34e736116f11cd1a207d7497c9e340a39f9e913b8ce1ee not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.375267 4899 scope.go:117] "RemoveContainer" containerID="f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.375747 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e"} err="failed to get container status \"f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\": rpc error: code = NotFound desc = could not find container \"f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e\": container with ID starting with f6dc075b6b1e0fbc00127f1fde59f3c3ebf7011111333851748b3148c305a86e not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.375767 4899 scope.go:117] "RemoveContainer" containerID="3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.376049 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b"} err="failed to get container status \"3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\": rpc error: code = NotFound desc = could not find container \"3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b\": container with ID starting with 3f3e0312f1360d1e939fb66eb1e2f998203bd53ff3b3b6867f256337fa0c236b not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.376078 4899 scope.go:117] "RemoveContainer" containerID="907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.376342 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad"} err="failed to get container status \"907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\": rpc error: code = NotFound desc = could not find container \"907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad\": container with ID starting with 907a50d2c4e95993115bd3d3581da644f639b88dc8d4dcc2182733397c8763ad not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.376364 4899 scope.go:117] "RemoveContainer" containerID="265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.376676 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6"} err="failed to get container status 
\"265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\": rpc error: code = NotFound desc = could not find container \"265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6\": container with ID starting with 265f6a5df084956ee8e8eee3469b5a6533b785393de240600f04292496ff3aa6 not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.376698 4899 scope.go:117] "RemoveContainer" containerID="36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.376983 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72"} err="failed to get container status \"36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\": rpc error: code = NotFound desc = could not find container \"36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72\": container with ID starting with 36b6bdce0323c79fe210e61b93e8c88e494441c8eed1a8b566e918edbde3cf72 not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.377014 4899 scope.go:117] "RemoveContainer" containerID="8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.377289 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68"} err="failed to get container status \"8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\": rpc error: code = NotFound desc = could not find container \"8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68\": container with ID starting with 8919a751ca1f056ee16ff6276acd8ac9a96f5e79544b53c53587df240a0d9c68 not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.377318 4899 scope.go:117] "RemoveContainer" containerID="e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.377540 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e"} err="failed to get container status \"e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\": rpc error: code = NotFound desc = could not find container \"e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e\": container with ID starting with e682d467e30d2463994106ee41fc6e4dcfc237c649ca1954b5af7c555717837e not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.377581 4899 scope.go:117] "RemoveContainer" containerID="965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.377835 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606"} err="failed to get container status \"965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\": rpc error: code = NotFound desc = could not find container \"965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606\": container with ID starting with 965de176f4d5d3b0d54af387350a535ec06e159dcb378da8fa37da99e05f3606 not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.377865 4899 scope.go:117] "RemoveContainer" 
containerID="e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.378829 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331"} err="failed to get container status \"e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331\": rpc error: code = NotFound desc = could not find container \"e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331\": container with ID starting with e1d49a6fadc3124aed5b9c36870bfe330ff7026388139f9f8d540c93c4206331 not found: ID does not exist" Oct 03 08:50:18 crc kubenswrapper[4899]: I1003 08:50:18.533432 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="764a7341-6f52-4fc1-9086-87b90aa126e8" path="/var/lib/kubelet/pods/764a7341-6f52-4fc1-9086-87b90aa126e8/volumes" Oct 03 08:50:19 crc kubenswrapper[4899]: I1003 08:50:19.211803 4899 generic.go:334] "Generic (PLEG): container finished" podID="f53c8573-e9dc-40a7-9ef8-215a7813940b" containerID="f14b65c08dd4e516e707ed66a137503791d4da3ebb249c18c3cb3c80b5d151f3" exitCode=0 Oct 03 08:50:19 crc kubenswrapper[4899]: I1003 08:50:19.211840 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" event={"ID":"f53c8573-e9dc-40a7-9ef8-215a7813940b","Type":"ContainerDied","Data":"f14b65c08dd4e516e707ed66a137503791d4da3ebb249c18c3cb3c80b5d151f3"} Oct 03 08:50:19 crc kubenswrapper[4899]: I1003 08:50:19.211868 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" event={"ID":"f53c8573-e9dc-40a7-9ef8-215a7813940b","Type":"ContainerStarted","Data":"fb6eb56b0a2b587d0a83e8c54167cddb8221ddd498fdc963b7436b18aeb8e0e1"} Oct 03 08:50:20 crc kubenswrapper[4899]: I1003 08:50:20.220538 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" event={"ID":"f53c8573-e9dc-40a7-9ef8-215a7813940b","Type":"ContainerStarted","Data":"c8d752cf3fc16a6e33ce5eb9b07f243c646fa1482d754ad4e8cbbdd776720345"} Oct 03 08:50:20 crc kubenswrapper[4899]: I1003 08:50:20.220932 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" event={"ID":"f53c8573-e9dc-40a7-9ef8-215a7813940b","Type":"ContainerStarted","Data":"bceb410cc7dfeff6303fc648f150e1aa86b357a0f6dd985a3382f9293a9425f4"} Oct 03 08:50:20 crc kubenswrapper[4899]: I1003 08:50:20.220956 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" event={"ID":"f53c8573-e9dc-40a7-9ef8-215a7813940b","Type":"ContainerStarted","Data":"865bbf69be2eec6550f95b5ea6d1e572354f51c64a60d290c3062c610e3a7a8e"} Oct 03 08:50:20 crc kubenswrapper[4899]: I1003 08:50:20.220972 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" event={"ID":"f53c8573-e9dc-40a7-9ef8-215a7813940b","Type":"ContainerStarted","Data":"29b2fd4d15f0a869dc80e6b7d15891ab7979f2ce93471ea68ebdcac3ad9b6ed0"} Oct 03 08:50:20 crc kubenswrapper[4899]: I1003 08:50:20.220985 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" event={"ID":"f53c8573-e9dc-40a7-9ef8-215a7813940b","Type":"ContainerStarted","Data":"71d2657ac066c1b28cf6c8f0d1b834402cd1cc651338d912e9445eed10c2b818"} Oct 03 08:50:20 crc kubenswrapper[4899]: I1003 08:50:20.221000 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" event={"ID":"f53c8573-e9dc-40a7-9ef8-215a7813940b","Type":"ContainerStarted","Data":"be8096f6b38ec3ca7b34c1209586be12c5ef331e67b9a5ec6e225a34995ff16e"} Oct 03 08:50:22 crc kubenswrapper[4899]: I1003 08:50:22.234226 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" event={"ID":"f53c8573-e9dc-40a7-9ef8-215a7813940b","Type":"ContainerStarted","Data":"882c521b7a3a54aebe6216c2337c5c2412ebab8562f603ca001b49d76cf30144"} Oct 03 08:50:24 crc kubenswrapper[4899]: I1003 08:50:24.245569 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" event={"ID":"f53c8573-e9dc-40a7-9ef8-215a7813940b","Type":"ContainerStarted","Data":"f57f966ca73bb001d9a34fde98cb847edc2fd11b321ee4c2648d9f5297a6dc4b"} Oct 03 08:50:24 crc kubenswrapper[4899]: I1003 08:50:24.245916 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:24 crc kubenswrapper[4899]: I1003 08:50:24.245930 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:24 crc kubenswrapper[4899]: I1003 08:50:24.245939 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:24 crc kubenswrapper[4899]: I1003 08:50:24.270654 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:24 crc kubenswrapper[4899]: I1003 08:50:24.270778 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:24 crc kubenswrapper[4899]: I1003 08:50:24.272259 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" podStartSLOduration=6.272247839 podStartE2EDuration="6.272247839s" podCreationTimestamp="2025-10-03 08:50:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:50:24.271504196 +0000 UTC m=+598.378989149" watchObservedRunningTime="2025-10-03 08:50:24.272247839 +0000 UTC m=+598.379732792" Oct 03 08:50:26 crc kubenswrapper[4899]: I1003 08:50:26.647372 4899 scope.go:117] "RemoveContainer" containerID="b2f7940cc405b005b23f102def919753d7820ecc4e45db9f25cb87611611f4dc" Oct 03 08:50:27 crc kubenswrapper[4899]: I1003 08:50:27.260683 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pgdhq_6f75d8d8-3b12-42bf-b447-0afb4413fd54/kube-multus/2.log" Oct 03 08:50:33 crc kubenswrapper[4899]: I1003 08:50:33.527292 4899 scope.go:117] "RemoveContainer" containerID="46a6450698daebd9728ad0b6a8bde26f5c4b700e695dc1dc1f7d33663c96b564" Oct 03 08:50:33 crc kubenswrapper[4899]: E1003 08:50:33.527769 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-pgdhq_openshift-multus(6f75d8d8-3b12-42bf-b447-0afb4413fd54)\"" pod="openshift-multus/multus-pgdhq" podUID="6f75d8d8-3b12-42bf-b447-0afb4413fd54" Oct 03 08:50:48 crc kubenswrapper[4899]: I1003 08:50:48.397088 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-ndqm8" Oct 03 08:50:48 crc kubenswrapper[4899]: I1003 
08:50:48.527421 4899 scope.go:117] "RemoveContainer" containerID="46a6450698daebd9728ad0b6a8bde26f5c4b700e695dc1dc1f7d33663c96b564" Oct 03 08:50:49 crc kubenswrapper[4899]: I1003 08:50:49.374494 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pgdhq_6f75d8d8-3b12-42bf-b447-0afb4413fd54/kube-multus/2.log" Oct 03 08:50:49 crc kubenswrapper[4899]: I1003 08:50:49.375051 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pgdhq" event={"ID":"6f75d8d8-3b12-42bf-b447-0afb4413fd54","Type":"ContainerStarted","Data":"af131c7a9839d5ec0972fe292cf572a76b7508962771caadbee9e6b7f6d81ae3"} Oct 03 08:50:58 crc kubenswrapper[4899]: I1003 08:50:58.834020 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45"] Oct 03 08:50:58 crc kubenswrapper[4899]: I1003 08:50:58.835612 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45" Oct 03 08:50:58 crc kubenswrapper[4899]: I1003 08:50:58.837871 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 03 08:50:58 crc kubenswrapper[4899]: I1003 08:50:58.841868 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45"] Oct 03 08:50:58 crc kubenswrapper[4899]: I1003 08:50:58.908294 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c4f4d920-9e4c-4828-89dd-4e95975d5ec8-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45\" (UID: \"c4f4d920-9e4c-4828-89dd-4e95975d5ec8\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45" Oct 03 08:50:58 crc kubenswrapper[4899]: I1003 08:50:58.908751 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hz8vv\" (UniqueName: \"kubernetes.io/projected/c4f4d920-9e4c-4828-89dd-4e95975d5ec8-kube-api-access-hz8vv\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45\" (UID: \"c4f4d920-9e4c-4828-89dd-4e95975d5ec8\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45" Oct 03 08:50:58 crc kubenswrapper[4899]: I1003 08:50:58.908884 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c4f4d920-9e4c-4828-89dd-4e95975d5ec8-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45\" (UID: \"c4f4d920-9e4c-4828-89dd-4e95975d5ec8\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45" Oct 03 08:50:59 crc kubenswrapper[4899]: I1003 08:50:59.009985 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c4f4d920-9e4c-4828-89dd-4e95975d5ec8-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45\" (UID: \"c4f4d920-9e4c-4828-89dd-4e95975d5ec8\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45" Oct 03 08:50:59 crc kubenswrapper[4899]: I1003 08:50:59.010350 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hz8vv\" 
(UniqueName: \"kubernetes.io/projected/c4f4d920-9e4c-4828-89dd-4e95975d5ec8-kube-api-access-hz8vv\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45\" (UID: \"c4f4d920-9e4c-4828-89dd-4e95975d5ec8\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45" Oct 03 08:50:59 crc kubenswrapper[4899]: I1003 08:50:59.010486 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c4f4d920-9e4c-4828-89dd-4e95975d5ec8-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45\" (UID: \"c4f4d920-9e4c-4828-89dd-4e95975d5ec8\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45" Oct 03 08:50:59 crc kubenswrapper[4899]: I1003 08:50:59.010594 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c4f4d920-9e4c-4828-89dd-4e95975d5ec8-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45\" (UID: \"c4f4d920-9e4c-4828-89dd-4e95975d5ec8\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45" Oct 03 08:50:59 crc kubenswrapper[4899]: I1003 08:50:59.011029 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c4f4d920-9e4c-4828-89dd-4e95975d5ec8-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45\" (UID: \"c4f4d920-9e4c-4828-89dd-4e95975d5ec8\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45" Oct 03 08:50:59 crc kubenswrapper[4899]: I1003 08:50:59.029049 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hz8vv\" (UniqueName: \"kubernetes.io/projected/c4f4d920-9e4c-4828-89dd-4e95975d5ec8-kube-api-access-hz8vv\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45\" (UID: \"c4f4d920-9e4c-4828-89dd-4e95975d5ec8\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45" Oct 03 08:50:59 crc kubenswrapper[4899]: I1003 08:50:59.152062 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45" Oct 03 08:50:59 crc kubenswrapper[4899]: I1003 08:50:59.317230 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45"] Oct 03 08:50:59 crc kubenswrapper[4899]: I1003 08:50:59.421859 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45" event={"ID":"c4f4d920-9e4c-4828-89dd-4e95975d5ec8","Type":"ContainerStarted","Data":"f9c0436f48c5b70c0b23aa9f6297d8b187b19a6046a717cf3e69ac886b20ce4c"} Oct 03 08:51:00 crc kubenswrapper[4899]: I1003 08:51:00.428223 4899 generic.go:334] "Generic (PLEG): container finished" podID="c4f4d920-9e4c-4828-89dd-4e95975d5ec8" containerID="c964cf3a300a814ca52306ea4857b82a07aff8ec64338d395d6d298e26f27d13" exitCode=0 Oct 03 08:51:00 crc kubenswrapper[4899]: I1003 08:51:00.428314 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45" event={"ID":"c4f4d920-9e4c-4828-89dd-4e95975d5ec8","Type":"ContainerDied","Data":"c964cf3a300a814ca52306ea4857b82a07aff8ec64338d395d6d298e26f27d13"} Oct 03 08:51:02 crc kubenswrapper[4899]: I1003 08:51:02.440214 4899 generic.go:334] "Generic (PLEG): container finished" podID="c4f4d920-9e4c-4828-89dd-4e95975d5ec8" containerID="d4a289b4c08dbba05387c24ae8744880afd80287b5ec754a09cbc706c477ff43" exitCode=0 Oct 03 08:51:02 crc kubenswrapper[4899]: I1003 08:51:02.440326 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45" event={"ID":"c4f4d920-9e4c-4828-89dd-4e95975d5ec8","Type":"ContainerDied","Data":"d4a289b4c08dbba05387c24ae8744880afd80287b5ec754a09cbc706c477ff43"} Oct 03 08:51:03 crc kubenswrapper[4899]: I1003 08:51:03.451422 4899 generic.go:334] "Generic (PLEG): container finished" podID="c4f4d920-9e4c-4828-89dd-4e95975d5ec8" containerID="c2d8fee29ef0f0948c9a700c5d8372a4975ca0d6ebe01a0a0fd691b8698d8108" exitCode=0 Oct 03 08:51:03 crc kubenswrapper[4899]: I1003 08:51:03.451477 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45" event={"ID":"c4f4d920-9e4c-4828-89dd-4e95975d5ec8","Type":"ContainerDied","Data":"c2d8fee29ef0f0948c9a700c5d8372a4975ca0d6ebe01a0a0fd691b8698d8108"} Oct 03 08:51:04 crc kubenswrapper[4899]: I1003 08:51:04.652926 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45" Oct 03 08:51:04 crc kubenswrapper[4899]: I1003 08:51:04.782317 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hz8vv\" (UniqueName: \"kubernetes.io/projected/c4f4d920-9e4c-4828-89dd-4e95975d5ec8-kube-api-access-hz8vv\") pod \"c4f4d920-9e4c-4828-89dd-4e95975d5ec8\" (UID: \"c4f4d920-9e4c-4828-89dd-4e95975d5ec8\") " Oct 03 08:51:04 crc kubenswrapper[4899]: I1003 08:51:04.782396 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c4f4d920-9e4c-4828-89dd-4e95975d5ec8-bundle\") pod \"c4f4d920-9e4c-4828-89dd-4e95975d5ec8\" (UID: \"c4f4d920-9e4c-4828-89dd-4e95975d5ec8\") " Oct 03 08:51:04 crc kubenswrapper[4899]: I1003 08:51:04.782420 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c4f4d920-9e4c-4828-89dd-4e95975d5ec8-util\") pod \"c4f4d920-9e4c-4828-89dd-4e95975d5ec8\" (UID: \"c4f4d920-9e4c-4828-89dd-4e95975d5ec8\") " Oct 03 08:51:04 crc kubenswrapper[4899]: I1003 08:51:04.783056 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4f4d920-9e4c-4828-89dd-4e95975d5ec8-bundle" (OuterVolumeSpecName: "bundle") pod "c4f4d920-9e4c-4828-89dd-4e95975d5ec8" (UID: "c4f4d920-9e4c-4828-89dd-4e95975d5ec8"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:51:04 crc kubenswrapper[4899]: I1003 08:51:04.787236 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4f4d920-9e4c-4828-89dd-4e95975d5ec8-kube-api-access-hz8vv" (OuterVolumeSpecName: "kube-api-access-hz8vv") pod "c4f4d920-9e4c-4828-89dd-4e95975d5ec8" (UID: "c4f4d920-9e4c-4828-89dd-4e95975d5ec8"). InnerVolumeSpecName "kube-api-access-hz8vv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:51:04 crc kubenswrapper[4899]: I1003 08:51:04.883588 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hz8vv\" (UniqueName: \"kubernetes.io/projected/c4f4d920-9e4c-4828-89dd-4e95975d5ec8-kube-api-access-hz8vv\") on node \"crc\" DevicePath \"\"" Oct 03 08:51:04 crc kubenswrapper[4899]: I1003 08:51:04.883629 4899 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c4f4d920-9e4c-4828-89dd-4e95975d5ec8-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:51:05 crc kubenswrapper[4899]: I1003 08:51:05.071249 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4f4d920-9e4c-4828-89dd-4e95975d5ec8-util" (OuterVolumeSpecName: "util") pod "c4f4d920-9e4c-4828-89dd-4e95975d5ec8" (UID: "c4f4d920-9e4c-4828-89dd-4e95975d5ec8"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:51:05 crc kubenswrapper[4899]: I1003 08:51:05.088019 4899 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c4f4d920-9e4c-4828-89dd-4e95975d5ec8-util\") on node \"crc\" DevicePath \"\"" Oct 03 08:51:05 crc kubenswrapper[4899]: I1003 08:51:05.470823 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45" event={"ID":"c4f4d920-9e4c-4828-89dd-4e95975d5ec8","Type":"ContainerDied","Data":"f9c0436f48c5b70c0b23aa9f6297d8b187b19a6046a717cf3e69ac886b20ce4c"} Oct 03 08:51:05 crc kubenswrapper[4899]: I1003 08:51:05.470863 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f9c0436f48c5b70c0b23aa9f6297d8b187b19a6046a717cf3e69ac886b20ce4c" Oct 03 08:51:05 crc kubenswrapper[4899]: I1003 08:51:05.470886 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45" Oct 03 08:51:07 crc kubenswrapper[4899]: I1003 08:51:07.890235 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-kf8v5"] Oct 03 08:51:07 crc kubenswrapper[4899]: E1003 08:51:07.892041 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4f4d920-9e4c-4828-89dd-4e95975d5ec8" containerName="util" Oct 03 08:51:07 crc kubenswrapper[4899]: I1003 08:51:07.892125 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4f4d920-9e4c-4828-89dd-4e95975d5ec8" containerName="util" Oct 03 08:51:07 crc kubenswrapper[4899]: E1003 08:51:07.892182 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4f4d920-9e4c-4828-89dd-4e95975d5ec8" containerName="pull" Oct 03 08:51:07 crc kubenswrapper[4899]: I1003 08:51:07.892230 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4f4d920-9e4c-4828-89dd-4e95975d5ec8" containerName="pull" Oct 03 08:51:07 crc kubenswrapper[4899]: E1003 08:51:07.892291 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4f4d920-9e4c-4828-89dd-4e95975d5ec8" containerName="extract" Oct 03 08:51:07 crc kubenswrapper[4899]: I1003 08:51:07.892355 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4f4d920-9e4c-4828-89dd-4e95975d5ec8" containerName="extract" Oct 03 08:51:07 crc kubenswrapper[4899]: I1003 08:51:07.892512 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4f4d920-9e4c-4828-89dd-4e95975d5ec8" containerName="extract" Oct 03 08:51:07 crc kubenswrapper[4899]: I1003 08:51:07.892967 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-kf8v5" Oct 03 08:51:07 crc kubenswrapper[4899]: I1003 08:51:07.895122 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-h8zxm" Oct 03 08:51:07 crc kubenswrapper[4899]: I1003 08:51:07.895184 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Oct 03 08:51:07 crc kubenswrapper[4899]: I1003 08:51:07.895365 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Oct 03 08:51:07 crc kubenswrapper[4899]: I1003 08:51:07.900751 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-kf8v5"] Oct 03 08:51:08 crc kubenswrapper[4899]: I1003 08:51:08.019484 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cljnk\" (UniqueName: \"kubernetes.io/projected/65081539-f48e-404c-96a9-c1f8035404ed-kube-api-access-cljnk\") pod \"nmstate-operator-858ddd8f98-kf8v5\" (UID: \"65081539-f48e-404c-96a9-c1f8035404ed\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-kf8v5" Oct 03 08:51:08 crc kubenswrapper[4899]: I1003 08:51:08.120806 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cljnk\" (UniqueName: \"kubernetes.io/projected/65081539-f48e-404c-96a9-c1f8035404ed-kube-api-access-cljnk\") pod \"nmstate-operator-858ddd8f98-kf8v5\" (UID: \"65081539-f48e-404c-96a9-c1f8035404ed\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-kf8v5" Oct 03 08:51:08 crc kubenswrapper[4899]: I1003 08:51:08.138967 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cljnk\" (UniqueName: \"kubernetes.io/projected/65081539-f48e-404c-96a9-c1f8035404ed-kube-api-access-cljnk\") pod \"nmstate-operator-858ddd8f98-kf8v5\" (UID: \"65081539-f48e-404c-96a9-c1f8035404ed\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-kf8v5" Oct 03 08:51:08 crc kubenswrapper[4899]: I1003 08:51:08.206630 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-kf8v5" Oct 03 08:51:08 crc kubenswrapper[4899]: I1003 08:51:08.372548 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-kf8v5"] Oct 03 08:51:08 crc kubenswrapper[4899]: W1003 08:51:08.378771 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod65081539_f48e_404c_96a9_c1f8035404ed.slice/crio-bee893765e7033f2fb9a94d002997eaedd5c7704e37ea8ff34595b0a3ecfb2f7 WatchSource:0}: Error finding container bee893765e7033f2fb9a94d002997eaedd5c7704e37ea8ff34595b0a3ecfb2f7: Status 404 returned error can't find the container with id bee893765e7033f2fb9a94d002997eaedd5c7704e37ea8ff34595b0a3ecfb2f7 Oct 03 08:51:08 crc kubenswrapper[4899]: I1003 08:51:08.487019 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-kf8v5" event={"ID":"65081539-f48e-404c-96a9-c1f8035404ed","Type":"ContainerStarted","Data":"bee893765e7033f2fb9a94d002997eaedd5c7704e37ea8ff34595b0a3ecfb2f7"} Oct 03 08:51:11 crc kubenswrapper[4899]: I1003 08:51:11.500429 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-kf8v5" event={"ID":"65081539-f48e-404c-96a9-c1f8035404ed","Type":"ContainerStarted","Data":"63aa4b404c57569c0cfcf5cbc17a6b0f0f5a4d71354bc823edefd57bd5d6074a"} Oct 03 08:51:11 crc kubenswrapper[4899]: I1003 08:51:11.518196 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-858ddd8f98-kf8v5" podStartSLOduration=2.318340642 podStartE2EDuration="4.518179774s" podCreationTimestamp="2025-10-03 08:51:07 +0000 UTC" firstStartedPulling="2025-10-03 08:51:08.381561927 +0000 UTC m=+642.489046870" lastFinishedPulling="2025-10-03 08:51:10.581401049 +0000 UTC m=+644.688886002" observedRunningTime="2025-10-03 08:51:11.5168274 +0000 UTC m=+645.624312353" watchObservedRunningTime="2025-10-03 08:51:11.518179774 +0000 UTC m=+645.625664727" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.475261 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-jqv6w"] Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.476282 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-jqv6w" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.479199 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-6ftlm" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.486292 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-jqv6w"] Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.493625 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-65rlw"] Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.494461 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-65rlw" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.499907 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.524100 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-65rlw"] Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.560826 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-wggc2"] Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.561497 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-wggc2" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.583392 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnvzz\" (UniqueName: \"kubernetes.io/projected/e97cb3b7-2cf2-4021-b189-0e4c79b60f9a-kube-api-access-lnvzz\") pod \"nmstate-metrics-fdff9cb8d-jqv6w\" (UID: \"e97cb3b7-2cf2-4021-b189-0e4c79b60f9a\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-jqv6w" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.583558 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/cfd8a398-f0f8-47ed-9f92-49edea78e66b-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-65rlw\" (UID: \"cfd8a398-f0f8-47ed-9f92-49edea78e66b\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-65rlw" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.583629 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67f4f\" (UniqueName: \"kubernetes.io/projected/cfd8a398-f0f8-47ed-9f92-49edea78e66b-kube-api-access-67f4f\") pod \"nmstate-webhook-6cdbc54649-65rlw\" (UID: \"cfd8a398-f0f8-47ed-9f92-49edea78e66b\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-65rlw" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.630178 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-9lk8l"] Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.630783 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-9lk8l" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.634089 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.634600 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-p5dps" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.639532 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-9lk8l"] Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.641506 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.684945 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/3bcb7a6a-9902-4fea-a7d2-c7a508d7f695-nmstate-lock\") pod \"nmstate-handler-wggc2\" (UID: \"3bcb7a6a-9902-4fea-a7d2-c7a508d7f695\") " pod="openshift-nmstate/nmstate-handler-wggc2" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.685010 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/3bcb7a6a-9902-4fea-a7d2-c7a508d7f695-ovs-socket\") pod \"nmstate-handler-wggc2\" (UID: \"3bcb7a6a-9902-4fea-a7d2-c7a508d7f695\") " pod="openshift-nmstate/nmstate-handler-wggc2" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.685036 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/932306e1-0688-47a3-af53-642db1b63eb0-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-9lk8l\" (UID: \"932306e1-0688-47a3-af53-642db1b63eb0\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-9lk8l" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.685121 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnvzz\" (UniqueName: \"kubernetes.io/projected/e97cb3b7-2cf2-4021-b189-0e4c79b60f9a-kube-api-access-lnvzz\") pod \"nmstate-metrics-fdff9cb8d-jqv6w\" (UID: \"e97cb3b7-2cf2-4021-b189-0e4c79b60f9a\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-jqv6w" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.685151 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/932306e1-0688-47a3-af53-642db1b63eb0-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-9lk8l\" (UID: \"932306e1-0688-47a3-af53-642db1b63eb0\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-9lk8l" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.685199 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/cfd8a398-f0f8-47ed-9f92-49edea78e66b-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-65rlw\" (UID: \"cfd8a398-f0f8-47ed-9f92-49edea78e66b\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-65rlw" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.685225 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67f4f\" (UniqueName: \"kubernetes.io/projected/cfd8a398-f0f8-47ed-9f92-49edea78e66b-kube-api-access-67f4f\") pod 
\"nmstate-webhook-6cdbc54649-65rlw\" (UID: \"cfd8a398-f0f8-47ed-9f92-49edea78e66b\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-65rlw" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.685246 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxn84\" (UniqueName: \"kubernetes.io/projected/3bcb7a6a-9902-4fea-a7d2-c7a508d7f695-kube-api-access-fxn84\") pod \"nmstate-handler-wggc2\" (UID: \"3bcb7a6a-9902-4fea-a7d2-c7a508d7f695\") " pod="openshift-nmstate/nmstate-handler-wggc2" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.685277 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sp5wh\" (UniqueName: \"kubernetes.io/projected/932306e1-0688-47a3-af53-642db1b63eb0-kube-api-access-sp5wh\") pod \"nmstate-console-plugin-6b874cbd85-9lk8l\" (UID: \"932306e1-0688-47a3-af53-642db1b63eb0\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-9lk8l" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.685303 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/3bcb7a6a-9902-4fea-a7d2-c7a508d7f695-dbus-socket\") pod \"nmstate-handler-wggc2\" (UID: \"3bcb7a6a-9902-4fea-a7d2-c7a508d7f695\") " pod="openshift-nmstate/nmstate-handler-wggc2" Oct 03 08:51:12 crc kubenswrapper[4899]: E1003 08:51:12.685349 4899 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Oct 03 08:51:12 crc kubenswrapper[4899]: E1003 08:51:12.685424 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cfd8a398-f0f8-47ed-9f92-49edea78e66b-tls-key-pair podName:cfd8a398-f0f8-47ed-9f92-49edea78e66b nodeName:}" failed. No retries permitted until 2025-10-03 08:51:13.185402418 +0000 UTC m=+647.292887371 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/cfd8a398-f0f8-47ed-9f92-49edea78e66b-tls-key-pair") pod "nmstate-webhook-6cdbc54649-65rlw" (UID: "cfd8a398-f0f8-47ed-9f92-49edea78e66b") : secret "openshift-nmstate-webhook" not found Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.703413 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67f4f\" (UniqueName: \"kubernetes.io/projected/cfd8a398-f0f8-47ed-9f92-49edea78e66b-kube-api-access-67f4f\") pod \"nmstate-webhook-6cdbc54649-65rlw\" (UID: \"cfd8a398-f0f8-47ed-9f92-49edea78e66b\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-65rlw" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.703419 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnvzz\" (UniqueName: \"kubernetes.io/projected/e97cb3b7-2cf2-4021-b189-0e4c79b60f9a-kube-api-access-lnvzz\") pod \"nmstate-metrics-fdff9cb8d-jqv6w\" (UID: \"e97cb3b7-2cf2-4021-b189-0e4c79b60f9a\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-jqv6w" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.787841 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sp5wh\" (UniqueName: \"kubernetes.io/projected/932306e1-0688-47a3-af53-642db1b63eb0-kube-api-access-sp5wh\") pod \"nmstate-console-plugin-6b874cbd85-9lk8l\" (UID: \"932306e1-0688-47a3-af53-642db1b63eb0\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-9lk8l" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.787934 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/3bcb7a6a-9902-4fea-a7d2-c7a508d7f695-dbus-socket\") pod \"nmstate-handler-wggc2\" (UID: \"3bcb7a6a-9902-4fea-a7d2-c7a508d7f695\") " pod="openshift-nmstate/nmstate-handler-wggc2" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.787997 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/3bcb7a6a-9902-4fea-a7d2-c7a508d7f695-nmstate-lock\") pod \"nmstate-handler-wggc2\" (UID: \"3bcb7a6a-9902-4fea-a7d2-c7a508d7f695\") " pod="openshift-nmstate/nmstate-handler-wggc2" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.788084 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/3bcb7a6a-9902-4fea-a7d2-c7a508d7f695-ovs-socket\") pod \"nmstate-handler-wggc2\" (UID: \"3bcb7a6a-9902-4fea-a7d2-c7a508d7f695\") " pod="openshift-nmstate/nmstate-handler-wggc2" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.788104 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/932306e1-0688-47a3-af53-642db1b63eb0-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-9lk8l\" (UID: \"932306e1-0688-47a3-af53-642db1b63eb0\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-9lk8l" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.788203 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/3bcb7a6a-9902-4fea-a7d2-c7a508d7f695-ovs-socket\") pod \"nmstate-handler-wggc2\" (UID: \"3bcb7a6a-9902-4fea-a7d2-c7a508d7f695\") " pod="openshift-nmstate/nmstate-handler-wggc2" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.788206 4899 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/932306e1-0688-47a3-af53-642db1b63eb0-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-9lk8l\" (UID: \"932306e1-0688-47a3-af53-642db1b63eb0\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-9lk8l" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.788234 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/3bcb7a6a-9902-4fea-a7d2-c7a508d7f695-nmstate-lock\") pod \"nmstate-handler-wggc2\" (UID: \"3bcb7a6a-9902-4fea-a7d2-c7a508d7f695\") " pod="openshift-nmstate/nmstate-handler-wggc2" Oct 03 08:51:12 crc kubenswrapper[4899]: E1003 08:51:12.788284 4899 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Oct 03 08:51:12 crc kubenswrapper[4899]: E1003 08:51:12.788332 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/932306e1-0688-47a3-af53-642db1b63eb0-plugin-serving-cert podName:932306e1-0688-47a3-af53-642db1b63eb0 nodeName:}" failed. No retries permitted until 2025-10-03 08:51:13.288313025 +0000 UTC m=+647.395797978 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/932306e1-0688-47a3-af53-642db1b63eb0-plugin-serving-cert") pod "nmstate-console-plugin-6b874cbd85-9lk8l" (UID: "932306e1-0688-47a3-af53-642db1b63eb0") : secret "plugin-serving-cert" not found Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.788348 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxn84\" (UniqueName: \"kubernetes.io/projected/3bcb7a6a-9902-4fea-a7d2-c7a508d7f695-kube-api-access-fxn84\") pod \"nmstate-handler-wggc2\" (UID: \"3bcb7a6a-9902-4fea-a7d2-c7a508d7f695\") " pod="openshift-nmstate/nmstate-handler-wggc2" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.788348 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/3bcb7a6a-9902-4fea-a7d2-c7a508d7f695-dbus-socket\") pod \"nmstate-handler-wggc2\" (UID: \"3bcb7a6a-9902-4fea-a7d2-c7a508d7f695\") " pod="openshift-nmstate/nmstate-handler-wggc2" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.789528 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/932306e1-0688-47a3-af53-642db1b63eb0-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-9lk8l\" (UID: \"932306e1-0688-47a3-af53-642db1b63eb0\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-9lk8l" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.791819 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-jqv6w" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.807425 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxn84\" (UniqueName: \"kubernetes.io/projected/3bcb7a6a-9902-4fea-a7d2-c7a508d7f695-kube-api-access-fxn84\") pod \"nmstate-handler-wggc2\" (UID: \"3bcb7a6a-9902-4fea-a7d2-c7a508d7f695\") " pod="openshift-nmstate/nmstate-handler-wggc2" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.824910 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sp5wh\" (UniqueName: \"kubernetes.io/projected/932306e1-0688-47a3-af53-642db1b63eb0-kube-api-access-sp5wh\") pod \"nmstate-console-plugin-6b874cbd85-9lk8l\" (UID: \"932306e1-0688-47a3-af53-642db1b63eb0\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-9lk8l" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.853447 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-59c7996d7f-bzgxc"] Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.854285 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-59c7996d7f-bzgxc" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.868390 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-59c7996d7f-bzgxc"] Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.879734 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-wggc2" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.989676 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/abc9e3ad-c67c-4787-8d47-dc927ad4bb29-console-config\") pod \"console-59c7996d7f-bzgxc\" (UID: \"abc9e3ad-c67c-4787-8d47-dc927ad4bb29\") " pod="openshift-console/console-59c7996d7f-bzgxc" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.990343 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/abc9e3ad-c67c-4787-8d47-dc927ad4bb29-trusted-ca-bundle\") pod \"console-59c7996d7f-bzgxc\" (UID: \"abc9e3ad-c67c-4787-8d47-dc927ad4bb29\") " pod="openshift-console/console-59c7996d7f-bzgxc" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.990377 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfmkf\" (UniqueName: \"kubernetes.io/projected/abc9e3ad-c67c-4787-8d47-dc927ad4bb29-kube-api-access-jfmkf\") pod \"console-59c7996d7f-bzgxc\" (UID: \"abc9e3ad-c67c-4787-8d47-dc927ad4bb29\") " pod="openshift-console/console-59c7996d7f-bzgxc" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.990442 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/abc9e3ad-c67c-4787-8d47-dc927ad4bb29-console-serving-cert\") pod \"console-59c7996d7f-bzgxc\" (UID: \"abc9e3ad-c67c-4787-8d47-dc927ad4bb29\") " pod="openshift-console/console-59c7996d7f-bzgxc" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.990468 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/abc9e3ad-c67c-4787-8d47-dc927ad4bb29-service-ca\") pod 
\"console-59c7996d7f-bzgxc\" (UID: \"abc9e3ad-c67c-4787-8d47-dc927ad4bb29\") " pod="openshift-console/console-59c7996d7f-bzgxc" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.990640 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/abc9e3ad-c67c-4787-8d47-dc927ad4bb29-oauth-serving-cert\") pod \"console-59c7996d7f-bzgxc\" (UID: \"abc9e3ad-c67c-4787-8d47-dc927ad4bb29\") " pod="openshift-console/console-59c7996d7f-bzgxc" Oct 03 08:51:12 crc kubenswrapper[4899]: I1003 08:51:12.990690 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/abc9e3ad-c67c-4787-8d47-dc927ad4bb29-console-oauth-config\") pod \"console-59c7996d7f-bzgxc\" (UID: \"abc9e3ad-c67c-4787-8d47-dc927ad4bb29\") " pod="openshift-console/console-59c7996d7f-bzgxc" Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.043696 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-jqv6w"] Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.091567 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/abc9e3ad-c67c-4787-8d47-dc927ad4bb29-oauth-serving-cert\") pod \"console-59c7996d7f-bzgxc\" (UID: \"abc9e3ad-c67c-4787-8d47-dc927ad4bb29\") " pod="openshift-console/console-59c7996d7f-bzgxc" Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.091624 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/abc9e3ad-c67c-4787-8d47-dc927ad4bb29-console-oauth-config\") pod \"console-59c7996d7f-bzgxc\" (UID: \"abc9e3ad-c67c-4787-8d47-dc927ad4bb29\") " pod="openshift-console/console-59c7996d7f-bzgxc" Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.091696 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/abc9e3ad-c67c-4787-8d47-dc927ad4bb29-console-config\") pod \"console-59c7996d7f-bzgxc\" (UID: \"abc9e3ad-c67c-4787-8d47-dc927ad4bb29\") " pod="openshift-console/console-59c7996d7f-bzgxc" Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.091749 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/abc9e3ad-c67c-4787-8d47-dc927ad4bb29-trusted-ca-bundle\") pod \"console-59c7996d7f-bzgxc\" (UID: \"abc9e3ad-c67c-4787-8d47-dc927ad4bb29\") " pod="openshift-console/console-59c7996d7f-bzgxc" Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.091773 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfmkf\" (UniqueName: \"kubernetes.io/projected/abc9e3ad-c67c-4787-8d47-dc927ad4bb29-kube-api-access-jfmkf\") pod \"console-59c7996d7f-bzgxc\" (UID: \"abc9e3ad-c67c-4787-8d47-dc927ad4bb29\") " pod="openshift-console/console-59c7996d7f-bzgxc" Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.091822 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/abc9e3ad-c67c-4787-8d47-dc927ad4bb29-console-serving-cert\") pod \"console-59c7996d7f-bzgxc\" (UID: \"abc9e3ad-c67c-4787-8d47-dc927ad4bb29\") " pod="openshift-console/console-59c7996d7f-bzgxc" Oct 03 08:51:13 crc 
kubenswrapper[4899]: I1003 08:51:13.091842 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/abc9e3ad-c67c-4787-8d47-dc927ad4bb29-service-ca\") pod \"console-59c7996d7f-bzgxc\" (UID: \"abc9e3ad-c67c-4787-8d47-dc927ad4bb29\") " pod="openshift-console/console-59c7996d7f-bzgxc" Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.092716 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/abc9e3ad-c67c-4787-8d47-dc927ad4bb29-oauth-serving-cert\") pod \"console-59c7996d7f-bzgxc\" (UID: \"abc9e3ad-c67c-4787-8d47-dc927ad4bb29\") " pod="openshift-console/console-59c7996d7f-bzgxc" Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.092826 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/abc9e3ad-c67c-4787-8d47-dc927ad4bb29-service-ca\") pod \"console-59c7996d7f-bzgxc\" (UID: \"abc9e3ad-c67c-4787-8d47-dc927ad4bb29\") " pod="openshift-console/console-59c7996d7f-bzgxc" Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.092990 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/abc9e3ad-c67c-4787-8d47-dc927ad4bb29-console-config\") pod \"console-59c7996d7f-bzgxc\" (UID: \"abc9e3ad-c67c-4787-8d47-dc927ad4bb29\") " pod="openshift-console/console-59c7996d7f-bzgxc" Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.093814 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/abc9e3ad-c67c-4787-8d47-dc927ad4bb29-trusted-ca-bundle\") pod \"console-59c7996d7f-bzgxc\" (UID: \"abc9e3ad-c67c-4787-8d47-dc927ad4bb29\") " pod="openshift-console/console-59c7996d7f-bzgxc" Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.096906 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/abc9e3ad-c67c-4787-8d47-dc927ad4bb29-console-serving-cert\") pod \"console-59c7996d7f-bzgxc\" (UID: \"abc9e3ad-c67c-4787-8d47-dc927ad4bb29\") " pod="openshift-console/console-59c7996d7f-bzgxc" Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.097944 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/abc9e3ad-c67c-4787-8d47-dc927ad4bb29-console-oauth-config\") pod \"console-59c7996d7f-bzgxc\" (UID: \"abc9e3ad-c67c-4787-8d47-dc927ad4bb29\") " pod="openshift-console/console-59c7996d7f-bzgxc" Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.156648 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfmkf\" (UniqueName: \"kubernetes.io/projected/abc9e3ad-c67c-4787-8d47-dc927ad4bb29-kube-api-access-jfmkf\") pod \"console-59c7996d7f-bzgxc\" (UID: \"abc9e3ad-c67c-4787-8d47-dc927ad4bb29\") " pod="openshift-console/console-59c7996d7f-bzgxc" Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.171530 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-59c7996d7f-bzgxc" Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.193087 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/cfd8a398-f0f8-47ed-9f92-49edea78e66b-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-65rlw\" (UID: \"cfd8a398-f0f8-47ed-9f92-49edea78e66b\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-65rlw" Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.196598 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/cfd8a398-f0f8-47ed-9f92-49edea78e66b-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-65rlw\" (UID: \"cfd8a398-f0f8-47ed-9f92-49edea78e66b\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-65rlw" Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.294690 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/932306e1-0688-47a3-af53-642db1b63eb0-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-9lk8l\" (UID: \"932306e1-0688-47a3-af53-642db1b63eb0\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-9lk8l" Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.298744 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/932306e1-0688-47a3-af53-642db1b63eb0-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-9lk8l\" (UID: \"932306e1-0688-47a3-af53-642db1b63eb0\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-9lk8l" Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.349018 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-59c7996d7f-bzgxc"] Oct 03 08:51:13 crc kubenswrapper[4899]: W1003 08:51:13.357591 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podabc9e3ad_c67c_4787_8d47_dc927ad4bb29.slice/crio-a7c5e43fe6a2c515fd968028f534b2b54eeb9e9e1c0bf551ae823f70bac45f86 WatchSource:0}: Error finding container a7c5e43fe6a2c515fd968028f534b2b54eeb9e9e1c0bf551ae823f70bac45f86: Status 404 returned error can't find the container with id a7c5e43fe6a2c515fd968028f534b2b54eeb9e9e1c0bf551ae823f70bac45f86 Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.419426 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-65rlw" Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.529496 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-59c7996d7f-bzgxc" event={"ID":"abc9e3ad-c67c-4787-8d47-dc927ad4bb29","Type":"ContainerStarted","Data":"e05378db16268c1fe78a99e7a68de0284cc9a752b128e38d09fda3832134175d"} Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.529527 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-59c7996d7f-bzgxc" event={"ID":"abc9e3ad-c67c-4787-8d47-dc927ad4bb29","Type":"ContainerStarted","Data":"a7c5e43fe6a2c515fd968028f534b2b54eeb9e9e1c0bf551ae823f70bac45f86"} Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.532523 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-wggc2" event={"ID":"3bcb7a6a-9902-4fea-a7d2-c7a508d7f695","Type":"ContainerStarted","Data":"0364562c5ac33b407accf504f685ae7fe9ed27eb194b5baab93d0056379dc45d"} Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.536280 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-jqv6w" event={"ID":"e97cb3b7-2cf2-4021-b189-0e4c79b60f9a","Type":"ContainerStarted","Data":"86edd3a742d895c90d347e12a1abfaf58a6fd686ad9c1faf98a19017dfcd9399"} Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.551436 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-9lk8l" Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.551462 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-59c7996d7f-bzgxc" podStartSLOduration=1.551444651 podStartE2EDuration="1.551444651s" podCreationTimestamp="2025-10-03 08:51:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:51:13.546111003 +0000 UTC m=+647.653595966" watchObservedRunningTime="2025-10-03 08:51:13.551444651 +0000 UTC m=+647.658929594" Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.704988 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-9lk8l"] Oct 03 08:51:13 crc kubenswrapper[4899]: W1003 08:51:13.708524 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod932306e1_0688_47a3_af53_642db1b63eb0.slice/crio-e3afdf9513460718bfee075fdab4d37086b821d4f919aa4f709bc8b48149e8f6 WatchSource:0}: Error finding container e3afdf9513460718bfee075fdab4d37086b821d4f919aa4f709bc8b48149e8f6: Status 404 returned error can't find the container with id e3afdf9513460718bfee075fdab4d37086b821d4f919aa4f709bc8b48149e8f6 Oct 03 08:51:13 crc kubenswrapper[4899]: I1003 08:51:13.799956 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-65rlw"] Oct 03 08:51:14 crc kubenswrapper[4899]: I1003 08:51:14.542580 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-65rlw" event={"ID":"cfd8a398-f0f8-47ed-9f92-49edea78e66b","Type":"ContainerStarted","Data":"6c87f53591bb966a048240b5005eac4bbf9d49c97526b3373e20a4348e98d398"} Oct 03 08:51:14 crc kubenswrapper[4899]: I1003 08:51:14.543839 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-9lk8l" 
event={"ID":"932306e1-0688-47a3-af53-642db1b63eb0","Type":"ContainerStarted","Data":"e3afdf9513460718bfee075fdab4d37086b821d4f919aa4f709bc8b48149e8f6"} Oct 03 08:51:15 crc kubenswrapper[4899]: I1003 08:51:15.551737 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-wggc2" event={"ID":"3bcb7a6a-9902-4fea-a7d2-c7a508d7f695","Type":"ContainerStarted","Data":"155e71d3af3eb00cdf39efbfcc0329ecbd8f0d5f9c70fe6d1a1f95bbe2a6c923"} Oct 03 08:51:15 crc kubenswrapper[4899]: I1003 08:51:15.553684 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-wggc2" Oct 03 08:51:15 crc kubenswrapper[4899]: I1003 08:51:15.554540 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-jqv6w" event={"ID":"e97cb3b7-2cf2-4021-b189-0e4c79b60f9a","Type":"ContainerStarted","Data":"3ceff0f1d8ee56dfa2e0345f2117ecc440c026ca6f5c264d253525144c754b84"} Oct 03 08:51:15 crc kubenswrapper[4899]: I1003 08:51:15.556963 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-65rlw" event={"ID":"cfd8a398-f0f8-47ed-9f92-49edea78e66b","Type":"ContainerStarted","Data":"e57e1c997924231a213fd517a6f6499c6225d33e0cb1b529d0243f2dd2357d48"} Oct 03 08:51:15 crc kubenswrapper[4899]: I1003 08:51:15.557508 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-65rlw" Oct 03 08:51:15 crc kubenswrapper[4899]: I1003 08:51:15.572242 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-wggc2" podStartSLOduration=1.350205591 podStartE2EDuration="3.572207753s" podCreationTimestamp="2025-10-03 08:51:12 +0000 UTC" firstStartedPulling="2025-10-03 08:51:12.903684154 +0000 UTC m=+647.011169107" lastFinishedPulling="2025-10-03 08:51:15.125686316 +0000 UTC m=+649.233171269" observedRunningTime="2025-10-03 08:51:15.56860827 +0000 UTC m=+649.676093223" watchObservedRunningTime="2025-10-03 08:51:15.572207753 +0000 UTC m=+649.679692706" Oct 03 08:51:15 crc kubenswrapper[4899]: I1003 08:51:15.591416 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-65rlw" podStartSLOduration=2.258291981 podStartE2EDuration="3.591397269s" podCreationTimestamp="2025-10-03 08:51:12 +0000 UTC" firstStartedPulling="2025-10-03 08:51:13.810520394 +0000 UTC m=+647.918005347" lastFinishedPulling="2025-10-03 08:51:15.143625682 +0000 UTC m=+649.251110635" observedRunningTime="2025-10-03 08:51:15.58671111 +0000 UTC m=+649.694196063" watchObservedRunningTime="2025-10-03 08:51:15.591397269 +0000 UTC m=+649.698882222" Oct 03 08:51:16 crc kubenswrapper[4899]: I1003 08:51:16.564923 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-9lk8l" event={"ID":"932306e1-0688-47a3-af53-642db1b63eb0","Type":"ContainerStarted","Data":"96330f11ef173309dacb9a8adfcac94bf567e25301ec4eafee0f32209e4b6742"} Oct 03 08:51:17 crc kubenswrapper[4899]: I1003 08:51:17.571596 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-jqv6w" event={"ID":"e97cb3b7-2cf2-4021-b189-0e4c79b60f9a","Type":"ContainerStarted","Data":"cb73836a3bf87a62356e84991491b261bcfcfdcfcf9189df7aceaf152d28ff39"} Oct 03 08:51:17 crc kubenswrapper[4899]: I1003 08:51:17.595491 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-jqv6w" podStartSLOduration=1.326433688 podStartE2EDuration="5.595468692s" podCreationTimestamp="2025-10-03 08:51:12 +0000 UTC" firstStartedPulling="2025-10-03 08:51:13.04774587 +0000 UTC m=+647.155230823" lastFinishedPulling="2025-10-03 08:51:17.316780874 +0000 UTC m=+651.424265827" observedRunningTime="2025-10-03 08:51:17.592733006 +0000 UTC m=+651.700217999" watchObservedRunningTime="2025-10-03 08:51:17.595468692 +0000 UTC m=+651.702953645" Oct 03 08:51:17 crc kubenswrapper[4899]: I1003 08:51:17.596405 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-9lk8l" podStartSLOduration=3.203853409 podStartE2EDuration="5.596397142s" podCreationTimestamp="2025-10-03 08:51:12 +0000 UTC" firstStartedPulling="2025-10-03 08:51:13.710314712 +0000 UTC m=+647.817799665" lastFinishedPulling="2025-10-03 08:51:16.102858445 +0000 UTC m=+650.210343398" observedRunningTime="2025-10-03 08:51:16.577604487 +0000 UTC m=+650.685089460" watchObservedRunningTime="2025-10-03 08:51:17.596397142 +0000 UTC m=+651.703882105" Oct 03 08:51:22 crc kubenswrapper[4899]: I1003 08:51:22.899609 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-wggc2" Oct 03 08:51:23 crc kubenswrapper[4899]: I1003 08:51:23.172434 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-59c7996d7f-bzgxc" Oct 03 08:51:23 crc kubenswrapper[4899]: I1003 08:51:23.173143 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-59c7996d7f-bzgxc" Oct 03 08:51:23 crc kubenswrapper[4899]: I1003 08:51:23.176904 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-59c7996d7f-bzgxc" Oct 03 08:51:23 crc kubenswrapper[4899]: I1003 08:51:23.604586 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-59c7996d7f-bzgxc" Oct 03 08:51:23 crc kubenswrapper[4899]: I1003 08:51:23.653303 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-k28dm"] Oct 03 08:51:33 crc kubenswrapper[4899]: I1003 08:51:33.426372 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-65rlw" Oct 03 08:51:44 crc kubenswrapper[4899]: I1003 08:51:44.731111 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb"] Oct 03 08:51:44 crc kubenswrapper[4899]: I1003 08:51:44.732703 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb" Oct 03 08:51:44 crc kubenswrapper[4899]: I1003 08:51:44.734177 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 03 08:51:44 crc kubenswrapper[4899]: I1003 08:51:44.740652 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb"] Oct 03 08:51:44 crc kubenswrapper[4899]: I1003 08:51:44.802156 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ab7bdf03-1685-4b51-b1a5-db0c9c4aa575-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb\" (UID: \"ab7bdf03-1685-4b51-b1a5-db0c9c4aa575\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb" Oct 03 08:51:44 crc kubenswrapper[4899]: I1003 08:51:44.802215 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-588fh\" (UniqueName: \"kubernetes.io/projected/ab7bdf03-1685-4b51-b1a5-db0c9c4aa575-kube-api-access-588fh\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb\" (UID: \"ab7bdf03-1685-4b51-b1a5-db0c9c4aa575\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb" Oct 03 08:51:44 crc kubenswrapper[4899]: I1003 08:51:44.802273 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ab7bdf03-1685-4b51-b1a5-db0c9c4aa575-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb\" (UID: \"ab7bdf03-1685-4b51-b1a5-db0c9c4aa575\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb" Oct 03 08:51:44 crc kubenswrapper[4899]: I1003 08:51:44.903159 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ab7bdf03-1685-4b51-b1a5-db0c9c4aa575-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb\" (UID: \"ab7bdf03-1685-4b51-b1a5-db0c9c4aa575\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb" Oct 03 08:51:44 crc kubenswrapper[4899]: I1003 08:51:44.903212 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-588fh\" (UniqueName: \"kubernetes.io/projected/ab7bdf03-1685-4b51-b1a5-db0c9c4aa575-kube-api-access-588fh\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb\" (UID: \"ab7bdf03-1685-4b51-b1a5-db0c9c4aa575\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb" Oct 03 08:51:44 crc kubenswrapper[4899]: I1003 08:51:44.903242 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ab7bdf03-1685-4b51-b1a5-db0c9c4aa575-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb\" (UID: \"ab7bdf03-1685-4b51-b1a5-db0c9c4aa575\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb" Oct 03 08:51:44 crc kubenswrapper[4899]: I1003 08:51:44.903719 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/ab7bdf03-1685-4b51-b1a5-db0c9c4aa575-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb\" (UID: \"ab7bdf03-1685-4b51-b1a5-db0c9c4aa575\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb" Oct 03 08:51:44 crc kubenswrapper[4899]: I1003 08:51:44.904046 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ab7bdf03-1685-4b51-b1a5-db0c9c4aa575-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb\" (UID: \"ab7bdf03-1685-4b51-b1a5-db0c9c4aa575\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb" Oct 03 08:51:44 crc kubenswrapper[4899]: I1003 08:51:44.922491 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-588fh\" (UniqueName: \"kubernetes.io/projected/ab7bdf03-1685-4b51-b1a5-db0c9c4aa575-kube-api-access-588fh\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb\" (UID: \"ab7bdf03-1685-4b51-b1a5-db0c9c4aa575\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb" Oct 03 08:51:45 crc kubenswrapper[4899]: I1003 08:51:45.059950 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb" Oct 03 08:51:45 crc kubenswrapper[4899]: I1003 08:51:45.243436 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb"] Oct 03 08:51:45 crc kubenswrapper[4899]: I1003 08:51:45.710295 4899 generic.go:334] "Generic (PLEG): container finished" podID="ab7bdf03-1685-4b51-b1a5-db0c9c4aa575" containerID="eda26e4d95dcb0b89cd24abc9be96c5410a5a6db25853ab96d1246f333d21f77" exitCode=0 Oct 03 08:51:45 crc kubenswrapper[4899]: I1003 08:51:45.710381 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb" event={"ID":"ab7bdf03-1685-4b51-b1a5-db0c9c4aa575","Type":"ContainerDied","Data":"eda26e4d95dcb0b89cd24abc9be96c5410a5a6db25853ab96d1246f333d21f77"} Oct 03 08:51:45 crc kubenswrapper[4899]: I1003 08:51:45.710913 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb" event={"ID":"ab7bdf03-1685-4b51-b1a5-db0c9c4aa575","Type":"ContainerStarted","Data":"3950a5812bf244c92408cbca16aac5375245078bf0524e737d26ce7f4d18c306"} Oct 03 08:51:47 crc kubenswrapper[4899]: I1003 08:51:47.721584 4899 generic.go:334] "Generic (PLEG): container finished" podID="ab7bdf03-1685-4b51-b1a5-db0c9c4aa575" containerID="84b77440465cc14f6bb19305bb75cb10430db912fce4b1b07211463e92e31637" exitCode=0 Oct 03 08:51:47 crc kubenswrapper[4899]: I1003 08:51:47.721671 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb" event={"ID":"ab7bdf03-1685-4b51-b1a5-db0c9c4aa575","Type":"ContainerDied","Data":"84b77440465cc14f6bb19305bb75cb10430db912fce4b1b07211463e92e31637"} Oct 03 08:51:48 crc kubenswrapper[4899]: I1003 08:51:48.693969 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-k28dm" podUID="633aedb3-7eca-4c2c-b6c3-69a0f7c4787d" containerName="console" 
containerID="cri-o://0c300360e76f1cadf78f91492ec9b0749864346e3775be9500dd0ab7eaf1a66b" gracePeriod=15 Oct 03 08:51:48 crc kubenswrapper[4899]: I1003 08:51:48.728485 4899 generic.go:334] "Generic (PLEG): container finished" podID="ab7bdf03-1685-4b51-b1a5-db0c9c4aa575" containerID="ca0372b2b170d73701c925c24c17e0f3f9aec370cc2a29a60d7a97ce92966d17" exitCode=0 Oct 03 08:51:48 crc kubenswrapper[4899]: I1003 08:51:48.728531 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb" event={"ID":"ab7bdf03-1685-4b51-b1a5-db0c9c4aa575","Type":"ContainerDied","Data":"ca0372b2b170d73701c925c24c17e0f3f9aec370cc2a29a60d7a97ce92966d17"} Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.044583 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-k28dm_633aedb3-7eca-4c2c-b6c3-69a0f7c4787d/console/0.log" Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.044993 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-k28dm" Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.158704 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-service-ca\") pod \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\" (UID: \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\") " Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.158775 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-console-serving-cert\") pod \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\" (UID: \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\") " Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.158798 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-oauth-serving-cert\") pod \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\" (UID: \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\") " Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.158861 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-console-oauth-config\") pod \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\" (UID: \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\") " Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.158901 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-console-config\") pod \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\" (UID: \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\") " Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.158915 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s6nbs\" (UniqueName: \"kubernetes.io/projected/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-kube-api-access-s6nbs\") pod \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\" (UID: \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\") " Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.158930 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-trusted-ca-bundle\") 
pod \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\" (UID: \"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d\") " Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.159505 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "633aedb3-7eca-4c2c-b6c3-69a0f7c4787d" (UID: "633aedb3-7eca-4c2c-b6c3-69a0f7c4787d"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.159567 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "633aedb3-7eca-4c2c-b6c3-69a0f7c4787d" (UID: "633aedb3-7eca-4c2c-b6c3-69a0f7c4787d"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.159664 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-service-ca" (OuterVolumeSpecName: "service-ca") pod "633aedb3-7eca-4c2c-b6c3-69a0f7c4787d" (UID: "633aedb3-7eca-4c2c-b6c3-69a0f7c4787d"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.160506 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-console-config" (OuterVolumeSpecName: "console-config") pod "633aedb3-7eca-4c2c-b6c3-69a0f7c4787d" (UID: "633aedb3-7eca-4c2c-b6c3-69a0f7c4787d"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.165524 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-kube-api-access-s6nbs" (OuterVolumeSpecName: "kube-api-access-s6nbs") pod "633aedb3-7eca-4c2c-b6c3-69a0f7c4787d" (UID: "633aedb3-7eca-4c2c-b6c3-69a0f7c4787d"). InnerVolumeSpecName "kube-api-access-s6nbs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.165545 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "633aedb3-7eca-4c2c-b6c3-69a0f7c4787d" (UID: "633aedb3-7eca-4c2c-b6c3-69a0f7c4787d"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.165799 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "633aedb3-7eca-4c2c-b6c3-69a0f7c4787d" (UID: "633aedb3-7eca-4c2c-b6c3-69a0f7c4787d"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.260383 4899 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.260425 4899 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.260435 4899 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-console-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.260444 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s6nbs\" (UniqueName: \"kubernetes.io/projected/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-kube-api-access-s6nbs\") on node \"crc\" DevicePath \"\"" Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.260460 4899 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-service-ca\") on node \"crc\" DevicePath \"\"" Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.260470 4899 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.260478 4899 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.734940 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-k28dm_633aedb3-7eca-4c2c-b6c3-69a0f7c4787d/console/0.log" Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.734999 4899 generic.go:334] "Generic (PLEG): container finished" podID="633aedb3-7eca-4c2c-b6c3-69a0f7c4787d" containerID="0c300360e76f1cadf78f91492ec9b0749864346e3775be9500dd0ab7eaf1a66b" exitCode=2 Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.735077 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-k28dm" Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.735070 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-k28dm" event={"ID":"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d","Type":"ContainerDied","Data":"0c300360e76f1cadf78f91492ec9b0749864346e3775be9500dd0ab7eaf1a66b"} Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.735140 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-k28dm" event={"ID":"633aedb3-7eca-4c2c-b6c3-69a0f7c4787d","Type":"ContainerDied","Data":"d0034f42474a439149dd168a9351c037b7be101be0c76e77bc2fecb10fbadb99"} Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.735164 4899 scope.go:117] "RemoveContainer" containerID="0c300360e76f1cadf78f91492ec9b0749864346e3775be9500dd0ab7eaf1a66b" Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.763001 4899 scope.go:117] "RemoveContainer" containerID="0c300360e76f1cadf78f91492ec9b0749864346e3775be9500dd0ab7eaf1a66b" Oct 03 08:51:49 crc kubenswrapper[4899]: E1003 08:51:49.763378 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c300360e76f1cadf78f91492ec9b0749864346e3775be9500dd0ab7eaf1a66b\": container with ID starting with 0c300360e76f1cadf78f91492ec9b0749864346e3775be9500dd0ab7eaf1a66b not found: ID does not exist" containerID="0c300360e76f1cadf78f91492ec9b0749864346e3775be9500dd0ab7eaf1a66b" Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.763410 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c300360e76f1cadf78f91492ec9b0749864346e3775be9500dd0ab7eaf1a66b"} err="failed to get container status \"0c300360e76f1cadf78f91492ec9b0749864346e3775be9500dd0ab7eaf1a66b\": rpc error: code = NotFound desc = could not find container \"0c300360e76f1cadf78f91492ec9b0749864346e3775be9500dd0ab7eaf1a66b\": container with ID starting with 0c300360e76f1cadf78f91492ec9b0749864346e3775be9500dd0ab7eaf1a66b not found: ID does not exist" Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.763446 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-k28dm"] Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.767557 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-k28dm"] Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.934286 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb" Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.995463 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ab7bdf03-1685-4b51-b1a5-db0c9c4aa575-bundle\") pod \"ab7bdf03-1685-4b51-b1a5-db0c9c4aa575\" (UID: \"ab7bdf03-1685-4b51-b1a5-db0c9c4aa575\") " Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.995630 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-588fh\" (UniqueName: \"kubernetes.io/projected/ab7bdf03-1685-4b51-b1a5-db0c9c4aa575-kube-api-access-588fh\") pod \"ab7bdf03-1685-4b51-b1a5-db0c9c4aa575\" (UID: \"ab7bdf03-1685-4b51-b1a5-db0c9c4aa575\") " Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.995667 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ab7bdf03-1685-4b51-b1a5-db0c9c4aa575-util\") pod \"ab7bdf03-1685-4b51-b1a5-db0c9c4aa575\" (UID: \"ab7bdf03-1685-4b51-b1a5-db0c9c4aa575\") " Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.997272 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab7bdf03-1685-4b51-b1a5-db0c9c4aa575-bundle" (OuterVolumeSpecName: "bundle") pod "ab7bdf03-1685-4b51-b1a5-db0c9c4aa575" (UID: "ab7bdf03-1685-4b51-b1a5-db0c9c4aa575"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:51:49 crc kubenswrapper[4899]: I1003 08:51:49.999331 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab7bdf03-1685-4b51-b1a5-db0c9c4aa575-kube-api-access-588fh" (OuterVolumeSpecName: "kube-api-access-588fh") pod "ab7bdf03-1685-4b51-b1a5-db0c9c4aa575" (UID: "ab7bdf03-1685-4b51-b1a5-db0c9c4aa575"). InnerVolumeSpecName "kube-api-access-588fh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:51:50 crc kubenswrapper[4899]: I1003 08:51:50.009285 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab7bdf03-1685-4b51-b1a5-db0c9c4aa575-util" (OuterVolumeSpecName: "util") pod "ab7bdf03-1685-4b51-b1a5-db0c9c4aa575" (UID: "ab7bdf03-1685-4b51-b1a5-db0c9c4aa575"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:51:50 crc kubenswrapper[4899]: I1003 08:51:50.097281 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-588fh\" (UniqueName: \"kubernetes.io/projected/ab7bdf03-1685-4b51-b1a5-db0c9c4aa575-kube-api-access-588fh\") on node \"crc\" DevicePath \"\"" Oct 03 08:51:50 crc kubenswrapper[4899]: I1003 08:51:50.097339 4899 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ab7bdf03-1685-4b51-b1a5-db0c9c4aa575-util\") on node \"crc\" DevicePath \"\"" Oct 03 08:51:50 crc kubenswrapper[4899]: I1003 08:51:50.097352 4899 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ab7bdf03-1685-4b51-b1a5-db0c9c4aa575-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:51:50 crc kubenswrapper[4899]: I1003 08:51:50.535172 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="633aedb3-7eca-4c2c-b6c3-69a0f7c4787d" path="/var/lib/kubelet/pods/633aedb3-7eca-4c2c-b6c3-69a0f7c4787d/volumes" Oct 03 08:51:50 crc kubenswrapper[4899]: I1003 08:51:50.745107 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb" event={"ID":"ab7bdf03-1685-4b51-b1a5-db0c9c4aa575","Type":"ContainerDied","Data":"3950a5812bf244c92408cbca16aac5375245078bf0524e737d26ce7f4d18c306"} Oct 03 08:51:50 crc kubenswrapper[4899]: I1003 08:51:50.745146 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3950a5812bf244c92408cbca16aac5375245078bf0524e737d26ce7f4d18c306" Oct 03 08:51:50 crc kubenswrapper[4899]: I1003 08:51:50.745311 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.213152 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-7dd48c8965-pvfqh"] Oct 03 08:51:59 crc kubenswrapper[4899]: E1003 08:51:59.215079 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab7bdf03-1685-4b51-b1a5-db0c9c4aa575" containerName="util" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.215221 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab7bdf03-1685-4b51-b1a5-db0c9c4aa575" containerName="util" Oct 03 08:51:59 crc kubenswrapper[4899]: E1003 08:51:59.215323 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="633aedb3-7eca-4c2c-b6c3-69a0f7c4787d" containerName="console" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.215400 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="633aedb3-7eca-4c2c-b6c3-69a0f7c4787d" containerName="console" Oct 03 08:51:59 crc kubenswrapper[4899]: E1003 08:51:59.215474 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab7bdf03-1685-4b51-b1a5-db0c9c4aa575" containerName="pull" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.215535 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab7bdf03-1685-4b51-b1a5-db0c9c4aa575" containerName="pull" Oct 03 08:51:59 crc kubenswrapper[4899]: E1003 08:51:59.215602 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab7bdf03-1685-4b51-b1a5-db0c9c4aa575" containerName="extract" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.215721 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab7bdf03-1685-4b51-b1a5-db0c9c4aa575" containerName="extract" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.215981 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="633aedb3-7eca-4c2c-b6c3-69a0f7c4787d" containerName="console" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.216062 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab7bdf03-1685-4b51-b1a5-db0c9c4aa575" containerName="extract" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.216597 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7dd48c8965-pvfqh" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.218322 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.218751 4899 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.218828 4899 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.218758 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.220145 4899 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-ktm5n" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.289277 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7dd48c8965-pvfqh"] Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.306236 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptq5f\" (UniqueName: \"kubernetes.io/projected/8643a4de-b352-4699-8054-7d4e4f97a946-kube-api-access-ptq5f\") pod \"metallb-operator-controller-manager-7dd48c8965-pvfqh\" (UID: \"8643a4de-b352-4699-8054-7d4e4f97a946\") " pod="metallb-system/metallb-operator-controller-manager-7dd48c8965-pvfqh" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.306285 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8643a4de-b352-4699-8054-7d4e4f97a946-webhook-cert\") pod \"metallb-operator-controller-manager-7dd48c8965-pvfqh\" (UID: \"8643a4de-b352-4699-8054-7d4e4f97a946\") " pod="metallb-system/metallb-operator-controller-manager-7dd48c8965-pvfqh" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.306322 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8643a4de-b352-4699-8054-7d4e4f97a946-apiservice-cert\") pod \"metallb-operator-controller-manager-7dd48c8965-pvfqh\" (UID: \"8643a4de-b352-4699-8054-7d4e4f97a946\") " pod="metallb-system/metallb-operator-controller-manager-7dd48c8965-pvfqh" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.408014 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8643a4de-b352-4699-8054-7d4e4f97a946-apiservice-cert\") pod \"metallb-operator-controller-manager-7dd48c8965-pvfqh\" (UID: \"8643a4de-b352-4699-8054-7d4e4f97a946\") " pod="metallb-system/metallb-operator-controller-manager-7dd48c8965-pvfqh" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.408105 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptq5f\" (UniqueName: \"kubernetes.io/projected/8643a4de-b352-4699-8054-7d4e4f97a946-kube-api-access-ptq5f\") pod \"metallb-operator-controller-manager-7dd48c8965-pvfqh\" (UID: \"8643a4de-b352-4699-8054-7d4e4f97a946\") " pod="metallb-system/metallb-operator-controller-manager-7dd48c8965-pvfqh" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.408131 
4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8643a4de-b352-4699-8054-7d4e4f97a946-webhook-cert\") pod \"metallb-operator-controller-manager-7dd48c8965-pvfqh\" (UID: \"8643a4de-b352-4699-8054-7d4e4f97a946\") " pod="metallb-system/metallb-operator-controller-manager-7dd48c8965-pvfqh" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.413476 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8643a4de-b352-4699-8054-7d4e4f97a946-apiservice-cert\") pod \"metallb-operator-controller-manager-7dd48c8965-pvfqh\" (UID: \"8643a4de-b352-4699-8054-7d4e4f97a946\") " pod="metallb-system/metallb-operator-controller-manager-7dd48c8965-pvfqh" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.413503 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8643a4de-b352-4699-8054-7d4e4f97a946-webhook-cert\") pod \"metallb-operator-controller-manager-7dd48c8965-pvfqh\" (UID: \"8643a4de-b352-4699-8054-7d4e4f97a946\") " pod="metallb-system/metallb-operator-controller-manager-7dd48c8965-pvfqh" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.424790 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptq5f\" (UniqueName: \"kubernetes.io/projected/8643a4de-b352-4699-8054-7d4e4f97a946-kube-api-access-ptq5f\") pod \"metallb-operator-controller-manager-7dd48c8965-pvfqh\" (UID: \"8643a4de-b352-4699-8054-7d4e4f97a946\") " pod="metallb-system/metallb-operator-controller-manager-7dd48c8965-pvfqh" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.470080 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-798f6d5f9c-6p988"] Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.470950 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-798f6d5f9c-6p988" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.473016 4899 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.473277 4899 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.473651 4899 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-78l2w" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.492457 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-798f6d5f9c-6p988"] Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.534508 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7dd48c8965-pvfqh" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.610533 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r48lw\" (UniqueName: \"kubernetes.io/projected/cd92ea76-f379-4b3d-aac6-2143d789e086-kube-api-access-r48lw\") pod \"metallb-operator-webhook-server-798f6d5f9c-6p988\" (UID: \"cd92ea76-f379-4b3d-aac6-2143d789e086\") " pod="metallb-system/metallb-operator-webhook-server-798f6d5f9c-6p988" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.610726 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cd92ea76-f379-4b3d-aac6-2143d789e086-webhook-cert\") pod \"metallb-operator-webhook-server-798f6d5f9c-6p988\" (UID: \"cd92ea76-f379-4b3d-aac6-2143d789e086\") " pod="metallb-system/metallb-operator-webhook-server-798f6d5f9c-6p988" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.610777 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cd92ea76-f379-4b3d-aac6-2143d789e086-apiservice-cert\") pod \"metallb-operator-webhook-server-798f6d5f9c-6p988\" (UID: \"cd92ea76-f379-4b3d-aac6-2143d789e086\") " pod="metallb-system/metallb-operator-webhook-server-798f6d5f9c-6p988" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.715583 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cd92ea76-f379-4b3d-aac6-2143d789e086-apiservice-cert\") pod \"metallb-operator-webhook-server-798f6d5f9c-6p988\" (UID: \"cd92ea76-f379-4b3d-aac6-2143d789e086\") " pod="metallb-system/metallb-operator-webhook-server-798f6d5f9c-6p988" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.715961 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r48lw\" (UniqueName: \"kubernetes.io/projected/cd92ea76-f379-4b3d-aac6-2143d789e086-kube-api-access-r48lw\") pod \"metallb-operator-webhook-server-798f6d5f9c-6p988\" (UID: \"cd92ea76-f379-4b3d-aac6-2143d789e086\") " pod="metallb-system/metallb-operator-webhook-server-798f6d5f9c-6p988" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.716035 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cd92ea76-f379-4b3d-aac6-2143d789e086-webhook-cert\") pod \"metallb-operator-webhook-server-798f6d5f9c-6p988\" (UID: \"cd92ea76-f379-4b3d-aac6-2143d789e086\") " pod="metallb-system/metallb-operator-webhook-server-798f6d5f9c-6p988" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.743011 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cd92ea76-f379-4b3d-aac6-2143d789e086-webhook-cert\") pod \"metallb-operator-webhook-server-798f6d5f9c-6p988\" (UID: \"cd92ea76-f379-4b3d-aac6-2143d789e086\") " pod="metallb-system/metallb-operator-webhook-server-798f6d5f9c-6p988" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.755622 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cd92ea76-f379-4b3d-aac6-2143d789e086-apiservice-cert\") pod \"metallb-operator-webhook-server-798f6d5f9c-6p988\" (UID: \"cd92ea76-f379-4b3d-aac6-2143d789e086\") " 
pod="metallb-system/metallb-operator-webhook-server-798f6d5f9c-6p988" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.758706 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r48lw\" (UniqueName: \"kubernetes.io/projected/cd92ea76-f379-4b3d-aac6-2143d789e086-kube-api-access-r48lw\") pod \"metallb-operator-webhook-server-798f6d5f9c-6p988\" (UID: \"cd92ea76-f379-4b3d-aac6-2143d789e086\") " pod="metallb-system/metallb-operator-webhook-server-798f6d5f9c-6p988" Oct 03 08:51:59 crc kubenswrapper[4899]: I1003 08:51:59.787771 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-798f6d5f9c-6p988" Oct 03 08:52:00 crc kubenswrapper[4899]: I1003 08:52:00.047529 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-798f6d5f9c-6p988"] Oct 03 08:52:00 crc kubenswrapper[4899]: W1003 08:52:00.058376 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcd92ea76_f379_4b3d_aac6_2143d789e086.slice/crio-f02c1bbf87c723cabc3fb4d3bf7c859e0eb204917b57c309929276eead16c491 WatchSource:0}: Error finding container f02c1bbf87c723cabc3fb4d3bf7c859e0eb204917b57c309929276eead16c491: Status 404 returned error can't find the container with id f02c1bbf87c723cabc3fb4d3bf7c859e0eb204917b57c309929276eead16c491 Oct 03 08:52:00 crc kubenswrapper[4899]: I1003 08:52:00.076553 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7dd48c8965-pvfqh"] Oct 03 08:52:00 crc kubenswrapper[4899]: W1003 08:52:00.081648 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8643a4de_b352_4699_8054_7d4e4f97a946.slice/crio-0d7723604ce08a1da034605ed8dcc8679d6bb2aa48c868dc8e1d1891b31bc63c WatchSource:0}: Error finding container 0d7723604ce08a1da034605ed8dcc8679d6bb2aa48c868dc8e1d1891b31bc63c: Status 404 returned error can't find the container with id 0d7723604ce08a1da034605ed8dcc8679d6bb2aa48c868dc8e1d1891b31bc63c Oct 03 08:52:00 crc kubenswrapper[4899]: I1003 08:52:00.795959 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7dd48c8965-pvfqh" event={"ID":"8643a4de-b352-4699-8054-7d4e4f97a946","Type":"ContainerStarted","Data":"0d7723604ce08a1da034605ed8dcc8679d6bb2aa48c868dc8e1d1891b31bc63c"} Oct 03 08:52:00 crc kubenswrapper[4899]: I1003 08:52:00.796863 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-798f6d5f9c-6p988" event={"ID":"cd92ea76-f379-4b3d-aac6-2143d789e086","Type":"ContainerStarted","Data":"f02c1bbf87c723cabc3fb4d3bf7c859e0eb204917b57c309929276eead16c491"} Oct 03 08:52:04 crc kubenswrapper[4899]: I1003 08:52:04.819933 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-798f6d5f9c-6p988" event={"ID":"cd92ea76-f379-4b3d-aac6-2143d789e086","Type":"ContainerStarted","Data":"e79907d1711eb61348223c1afd60a7f01ff173d5d225853bb968a750fcbdae3b"} Oct 03 08:52:04 crc kubenswrapper[4899]: I1003 08:52:04.820523 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-798f6d5f9c-6p988" Oct 03 08:52:04 crc kubenswrapper[4899]: I1003 08:52:04.821470 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="metallb-system/metallb-operator-controller-manager-7dd48c8965-pvfqh" event={"ID":"8643a4de-b352-4699-8054-7d4e4f97a946","Type":"ContainerStarted","Data":"23a94d4eb35cdb718afd808fca5012297d01b632be2e259723d31074eaa54345"} Oct 03 08:52:04 crc kubenswrapper[4899]: I1003 08:52:04.821649 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-7dd48c8965-pvfqh" Oct 03 08:52:04 crc kubenswrapper[4899]: I1003 08:52:04.836633 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-798f6d5f9c-6p988" podStartSLOduration=1.468962133 podStartE2EDuration="5.836617674s" podCreationTimestamp="2025-10-03 08:51:59 +0000 UTC" firstStartedPulling="2025-10-03 08:52:00.061103387 +0000 UTC m=+694.168588340" lastFinishedPulling="2025-10-03 08:52:04.428758928 +0000 UTC m=+698.536243881" observedRunningTime="2025-10-03 08:52:04.835597822 +0000 UTC m=+698.943082775" watchObservedRunningTime="2025-10-03 08:52:04.836617674 +0000 UTC m=+698.944102627" Oct 03 08:52:04 crc kubenswrapper[4899]: I1003 08:52:04.863464 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-7dd48c8965-pvfqh" podStartSLOduration=1.533790561 podStartE2EDuration="5.863449632s" podCreationTimestamp="2025-10-03 08:51:59 +0000 UTC" firstStartedPulling="2025-10-03 08:52:00.085425005 +0000 UTC m=+694.192909968" lastFinishedPulling="2025-10-03 08:52:04.415084076 +0000 UTC m=+698.522569039" observedRunningTime="2025-10-03 08:52:04.859197547 +0000 UTC m=+698.966682500" watchObservedRunningTime="2025-10-03 08:52:04.863449632 +0000 UTC m=+698.970934585" Oct 03 08:52:12 crc kubenswrapper[4899]: I1003 08:52:12.198293 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 08:52:12 crc kubenswrapper[4899]: I1003 08:52:12.198828 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 08:52:19 crc kubenswrapper[4899]: I1003 08:52:19.793655 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-798f6d5f9c-6p988" Oct 03 08:52:39 crc kubenswrapper[4899]: I1003 08:52:39.536956 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-7dd48c8965-pvfqh" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.296684 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-6t985"] Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.297462 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-6t985" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.301201 4899 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-s9z9r" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.301913 4899 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.303044 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-dq8r4"] Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.305472 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-dq8r4" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.307219 4899 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.307551 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.316984 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-6t985"] Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.396128 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-9jkt6"] Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.397264 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-9jkt6" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.399587 4899 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-tz54q" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.399757 4899 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.399763 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.400657 4899 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.406954 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-68d546b9d8-mgk7c"] Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.408056 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-68d546b9d8-mgk7c" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.409722 4899 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.417388 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-mgk7c"] Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.432649 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5db9v\" (UniqueName: \"kubernetes.io/projected/b1b20a0b-2a5d-472e-8ccd-e4e89d466eed-kube-api-access-5db9v\") pod \"frr-k8s-dq8r4\" (UID: \"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed\") " pod="metallb-system/frr-k8s-dq8r4" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.432710 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/b1b20a0b-2a5d-472e-8ccd-e4e89d466eed-metrics\") pod \"frr-k8s-dq8r4\" (UID: \"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed\") " pod="metallb-system/frr-k8s-dq8r4" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.432753 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/b1b20a0b-2a5d-472e-8ccd-e4e89d466eed-frr-startup\") pod \"frr-k8s-dq8r4\" (UID: \"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed\") " pod="metallb-system/frr-k8s-dq8r4" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.432791 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgmbs\" (UniqueName: \"kubernetes.io/projected/d43f846f-0e5f-4bb8-9041-6b471ca7e6df-kube-api-access-mgmbs\") pod \"frr-k8s-webhook-server-64bf5d555-6t985\" (UID: \"d43f846f-0e5f-4bb8-9041-6b471ca7e6df\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-6t985" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.432828 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d43f846f-0e5f-4bb8-9041-6b471ca7e6df-cert\") pod \"frr-k8s-webhook-server-64bf5d555-6t985\" (UID: \"d43f846f-0e5f-4bb8-9041-6b471ca7e6df\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-6t985" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.432855 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b1b20a0b-2a5d-472e-8ccd-e4e89d466eed-metrics-certs\") pod \"frr-k8s-dq8r4\" (UID: \"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed\") " pod="metallb-system/frr-k8s-dq8r4" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.432884 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/b1b20a0b-2a5d-472e-8ccd-e4e89d466eed-frr-conf\") pod \"frr-k8s-dq8r4\" (UID: \"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed\") " pod="metallb-system/frr-k8s-dq8r4" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.432937 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/b1b20a0b-2a5d-472e-8ccd-e4e89d466eed-frr-sockets\") pod \"frr-k8s-dq8r4\" (UID: \"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed\") " pod="metallb-system/frr-k8s-dq8r4" Oct 03 
08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.432986 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/b1b20a0b-2a5d-472e-8ccd-e4e89d466eed-reloader\") pod \"frr-k8s-dq8r4\" (UID: \"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed\") " pod="metallb-system/frr-k8s-dq8r4" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.533750 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5db9v\" (UniqueName: \"kubernetes.io/projected/b1b20a0b-2a5d-472e-8ccd-e4e89d466eed-kube-api-access-5db9v\") pod \"frr-k8s-dq8r4\" (UID: \"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed\") " pod="metallb-system/frr-k8s-dq8r4" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.533791 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/b1b20a0b-2a5d-472e-8ccd-e4e89d466eed-metrics\") pod \"frr-k8s-dq8r4\" (UID: \"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed\") " pod="metallb-system/frr-k8s-dq8r4" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.533813 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qsfz2\" (UniqueName: \"kubernetes.io/projected/94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6-kube-api-access-qsfz2\") pod \"speaker-9jkt6\" (UID: \"94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6\") " pod="metallb-system/speaker-9jkt6" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.533834 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8gf7\" (UniqueName: \"kubernetes.io/projected/8ec8f47a-3de7-4e04-a612-dbf72a0a21d5-kube-api-access-k8gf7\") pod \"controller-68d546b9d8-mgk7c\" (UID: \"8ec8f47a-3de7-4e04-a612-dbf72a0a21d5\") " pod="metallb-system/controller-68d546b9d8-mgk7c" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.533854 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/b1b20a0b-2a5d-472e-8ccd-e4e89d466eed-frr-startup\") pod \"frr-k8s-dq8r4\" (UID: \"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed\") " pod="metallb-system/frr-k8s-dq8r4" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.533868 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6-memberlist\") pod \"speaker-9jkt6\" (UID: \"94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6\") " pod="metallb-system/speaker-9jkt6" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.533912 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgmbs\" (UniqueName: \"kubernetes.io/projected/d43f846f-0e5f-4bb8-9041-6b471ca7e6df-kube-api-access-mgmbs\") pod \"frr-k8s-webhook-server-64bf5d555-6t985\" (UID: \"d43f846f-0e5f-4bb8-9041-6b471ca7e6df\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-6t985" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.533942 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d43f846f-0e5f-4bb8-9041-6b471ca7e6df-cert\") pod \"frr-k8s-webhook-server-64bf5d555-6t985\" (UID: \"d43f846f-0e5f-4bb8-9041-6b471ca7e6df\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-6t985" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.533963 4899 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b1b20a0b-2a5d-472e-8ccd-e4e89d466eed-metrics-certs\") pod \"frr-k8s-dq8r4\" (UID: \"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed\") " pod="metallb-system/frr-k8s-dq8r4" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.533985 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/b1b20a0b-2a5d-472e-8ccd-e4e89d466eed-frr-conf\") pod \"frr-k8s-dq8r4\" (UID: \"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed\") " pod="metallb-system/frr-k8s-dq8r4" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.534006 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6-metrics-certs\") pod \"speaker-9jkt6\" (UID: \"94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6\") " pod="metallb-system/speaker-9jkt6" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.534033 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/b1b20a0b-2a5d-472e-8ccd-e4e89d466eed-frr-sockets\") pod \"frr-k8s-dq8r4\" (UID: \"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed\") " pod="metallb-system/frr-k8s-dq8r4" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.534121 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8ec8f47a-3de7-4e04-a612-dbf72a0a21d5-metrics-certs\") pod \"controller-68d546b9d8-mgk7c\" (UID: \"8ec8f47a-3de7-4e04-a612-dbf72a0a21d5\") " pod="metallb-system/controller-68d546b9d8-mgk7c" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.534140 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6-metallb-excludel2\") pod \"speaker-9jkt6\" (UID: \"94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6\") " pod="metallb-system/speaker-9jkt6" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.534155 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/b1b20a0b-2a5d-472e-8ccd-e4e89d466eed-reloader\") pod \"frr-k8s-dq8r4\" (UID: \"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed\") " pod="metallb-system/frr-k8s-dq8r4" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.534179 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8ec8f47a-3de7-4e04-a612-dbf72a0a21d5-cert\") pod \"controller-68d546b9d8-mgk7c\" (UID: \"8ec8f47a-3de7-4e04-a612-dbf72a0a21d5\") " pod="metallb-system/controller-68d546b9d8-mgk7c" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.534242 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/b1b20a0b-2a5d-472e-8ccd-e4e89d466eed-metrics\") pod \"frr-k8s-dq8r4\" (UID: \"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed\") " pod="metallb-system/frr-k8s-dq8r4" Oct 03 08:52:40 crc kubenswrapper[4899]: E1003 08:52:40.534268 4899 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Oct 03 08:52:40 crc kubenswrapper[4899]: E1003 08:52:40.534303 4899 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/b1b20a0b-2a5d-472e-8ccd-e4e89d466eed-metrics-certs podName:b1b20a0b-2a5d-472e-8ccd-e4e89d466eed nodeName:}" failed. No retries permitted until 2025-10-03 08:52:41.034289779 +0000 UTC m=+735.141774732 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b1b20a0b-2a5d-472e-8ccd-e4e89d466eed-metrics-certs") pod "frr-k8s-dq8r4" (UID: "b1b20a0b-2a5d-472e-8ccd-e4e89d466eed") : secret "frr-k8s-certs-secret" not found Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.534445 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/b1b20a0b-2a5d-472e-8ccd-e4e89d466eed-frr-sockets\") pod \"frr-k8s-dq8r4\" (UID: \"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed\") " pod="metallb-system/frr-k8s-dq8r4" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.534635 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/b1b20a0b-2a5d-472e-8ccd-e4e89d466eed-reloader\") pod \"frr-k8s-dq8r4\" (UID: \"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed\") " pod="metallb-system/frr-k8s-dq8r4" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.534682 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/b1b20a0b-2a5d-472e-8ccd-e4e89d466eed-frr-conf\") pod \"frr-k8s-dq8r4\" (UID: \"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed\") " pod="metallb-system/frr-k8s-dq8r4" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.535665 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/b1b20a0b-2a5d-472e-8ccd-e4e89d466eed-frr-startup\") pod \"frr-k8s-dq8r4\" (UID: \"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed\") " pod="metallb-system/frr-k8s-dq8r4" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.540596 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d43f846f-0e5f-4bb8-9041-6b471ca7e6df-cert\") pod \"frr-k8s-webhook-server-64bf5d555-6t985\" (UID: \"d43f846f-0e5f-4bb8-9041-6b471ca7e6df\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-6t985" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.551780 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5db9v\" (UniqueName: \"kubernetes.io/projected/b1b20a0b-2a5d-472e-8ccd-e4e89d466eed-kube-api-access-5db9v\") pod \"frr-k8s-dq8r4\" (UID: \"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed\") " pod="metallb-system/frr-k8s-dq8r4" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.555793 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgmbs\" (UniqueName: \"kubernetes.io/projected/d43f846f-0e5f-4bb8-9041-6b471ca7e6df-kube-api-access-mgmbs\") pod \"frr-k8s-webhook-server-64bf5d555-6t985\" (UID: \"d43f846f-0e5f-4bb8-9041-6b471ca7e6df\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-6t985" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.625144 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-6t985" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.634720 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6-metrics-certs\") pod \"speaker-9jkt6\" (UID: \"94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6\") " pod="metallb-system/speaker-9jkt6" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.634810 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8ec8f47a-3de7-4e04-a612-dbf72a0a21d5-metrics-certs\") pod \"controller-68d546b9d8-mgk7c\" (UID: \"8ec8f47a-3de7-4e04-a612-dbf72a0a21d5\") " pod="metallb-system/controller-68d546b9d8-mgk7c" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.634834 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6-metallb-excludel2\") pod \"speaker-9jkt6\" (UID: \"94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6\") " pod="metallb-system/speaker-9jkt6" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.634858 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8ec8f47a-3de7-4e04-a612-dbf72a0a21d5-cert\") pod \"controller-68d546b9d8-mgk7c\" (UID: \"8ec8f47a-3de7-4e04-a612-dbf72a0a21d5\") " pod="metallb-system/controller-68d546b9d8-mgk7c" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.634914 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qsfz2\" (UniqueName: \"kubernetes.io/projected/94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6-kube-api-access-qsfz2\") pod \"speaker-9jkt6\" (UID: \"94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6\") " pod="metallb-system/speaker-9jkt6" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.634942 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8gf7\" (UniqueName: \"kubernetes.io/projected/8ec8f47a-3de7-4e04-a612-dbf72a0a21d5-kube-api-access-k8gf7\") pod \"controller-68d546b9d8-mgk7c\" (UID: \"8ec8f47a-3de7-4e04-a612-dbf72a0a21d5\") " pod="metallb-system/controller-68d546b9d8-mgk7c" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.634966 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6-memberlist\") pod \"speaker-9jkt6\" (UID: \"94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6\") " pod="metallb-system/speaker-9jkt6" Oct 03 08:52:40 crc kubenswrapper[4899]: E1003 08:52:40.635107 4899 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 03 08:52:40 crc kubenswrapper[4899]: E1003 08:52:40.635164 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6-memberlist podName:94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6 nodeName:}" failed. No retries permitted until 2025-10-03 08:52:41.135145046 +0000 UTC m=+735.242629999 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6-memberlist") pod "speaker-9jkt6" (UID: "94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6") : secret "metallb-memberlist" not found Oct 03 08:52:40 crc kubenswrapper[4899]: E1003 08:52:40.635218 4899 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Oct 03 08:52:40 crc kubenswrapper[4899]: E1003 08:52:40.635244 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6-metrics-certs podName:94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6 nodeName:}" failed. No retries permitted until 2025-10-03 08:52:41.135235588 +0000 UTC m=+735.242720541 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6-metrics-certs") pod "speaker-9jkt6" (UID: "94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6") : secret "speaker-certs-secret" not found Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.636388 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6-metallb-excludel2\") pod \"speaker-9jkt6\" (UID: \"94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6\") " pod="metallb-system/speaker-9jkt6" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.642654 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8ec8f47a-3de7-4e04-a612-dbf72a0a21d5-metrics-certs\") pod \"controller-68d546b9d8-mgk7c\" (UID: \"8ec8f47a-3de7-4e04-a612-dbf72a0a21d5\") " pod="metallb-system/controller-68d546b9d8-mgk7c" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.656991 4899 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.677463 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8gf7\" (UniqueName: \"kubernetes.io/projected/8ec8f47a-3de7-4e04-a612-dbf72a0a21d5-kube-api-access-k8gf7\") pod \"controller-68d546b9d8-mgk7c\" (UID: \"8ec8f47a-3de7-4e04-a612-dbf72a0a21d5\") " pod="metallb-system/controller-68d546b9d8-mgk7c" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.677971 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8ec8f47a-3de7-4e04-a612-dbf72a0a21d5-cert\") pod \"controller-68d546b9d8-mgk7c\" (UID: \"8ec8f47a-3de7-4e04-a612-dbf72a0a21d5\") " pod="metallb-system/controller-68d546b9d8-mgk7c" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.677972 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qsfz2\" (UniqueName: \"kubernetes.io/projected/94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6-kube-api-access-qsfz2\") pod \"speaker-9jkt6\" (UID: \"94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6\") " pod="metallb-system/speaker-9jkt6" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.733972 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-68d546b9d8-mgk7c" Oct 03 08:52:40 crc kubenswrapper[4899]: I1003 08:52:40.902150 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-6t985"] Oct 03 08:52:41 crc kubenswrapper[4899]: I1003 08:52:41.004270 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-6t985" event={"ID":"d43f846f-0e5f-4bb8-9041-6b471ca7e6df","Type":"ContainerStarted","Data":"9c450e430da4f34c8ce4e47825ad0e5a5d723ae0abfe70bc19b19ed0b69cf849"} Oct 03 08:52:41 crc kubenswrapper[4899]: I1003 08:52:41.039682 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b1b20a0b-2a5d-472e-8ccd-e4e89d466eed-metrics-certs\") pod \"frr-k8s-dq8r4\" (UID: \"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed\") " pod="metallb-system/frr-k8s-dq8r4" Oct 03 08:52:41 crc kubenswrapper[4899]: I1003 08:52:41.044645 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b1b20a0b-2a5d-472e-8ccd-e4e89d466eed-metrics-certs\") pod \"frr-k8s-dq8r4\" (UID: \"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed\") " pod="metallb-system/frr-k8s-dq8r4" Oct 03 08:52:41 crc kubenswrapper[4899]: I1003 08:52:41.141049 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6-memberlist\") pod \"speaker-9jkt6\" (UID: \"94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6\") " pod="metallb-system/speaker-9jkt6" Oct 03 08:52:41 crc kubenswrapper[4899]: I1003 08:52:41.141123 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6-metrics-certs\") pod \"speaker-9jkt6\" (UID: \"94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6\") " pod="metallb-system/speaker-9jkt6" Oct 03 08:52:41 crc kubenswrapper[4899]: E1003 08:52:41.141219 4899 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 03 08:52:41 crc kubenswrapper[4899]: E1003 08:52:41.141288 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6-memberlist podName:94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6 nodeName:}" failed. No retries permitted until 2025-10-03 08:52:42.141262795 +0000 UTC m=+736.248747748 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6-memberlist") pod "speaker-9jkt6" (UID: "94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6") : secret "metallb-memberlist" not found Oct 03 08:52:41 crc kubenswrapper[4899]: I1003 08:52:41.146392 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6-metrics-certs\") pod \"speaker-9jkt6\" (UID: \"94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6\") " pod="metallb-system/speaker-9jkt6" Oct 03 08:52:41 crc kubenswrapper[4899]: I1003 08:52:41.147058 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-mgk7c"] Oct 03 08:52:41 crc kubenswrapper[4899]: W1003 08:52:41.155732 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8ec8f47a_3de7_4e04_a612_dbf72a0a21d5.slice/crio-5679733557bc636e9bc244d65c4181cc9541f1c0e2f4c27647f10d7604a091e4 WatchSource:0}: Error finding container 5679733557bc636e9bc244d65c4181cc9541f1c0e2f4c27647f10d7604a091e4: Status 404 returned error can't find the container with id 5679733557bc636e9bc244d65c4181cc9541f1c0e2f4c27647f10d7604a091e4 Oct 03 08:52:41 crc kubenswrapper[4899]: I1003 08:52:41.233736 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-dq8r4" Oct 03 08:52:42 crc kubenswrapper[4899]: I1003 08:52:42.011262 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dq8r4" event={"ID":"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed","Type":"ContainerStarted","Data":"571b8bcc3ee49b9735e5d521fb7d306e191108e801543a84c4cc6dd1fc88ef87"} Oct 03 08:52:42 crc kubenswrapper[4899]: I1003 08:52:42.014263 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-mgk7c" event={"ID":"8ec8f47a-3de7-4e04-a612-dbf72a0a21d5","Type":"ContainerStarted","Data":"9944ae018eead330b8a63a7231f8356a19fdf86163325609d45bcdb1fe6473db"} Oct 03 08:52:42 crc kubenswrapper[4899]: I1003 08:52:42.014300 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-mgk7c" event={"ID":"8ec8f47a-3de7-4e04-a612-dbf72a0a21d5","Type":"ContainerStarted","Data":"11d9fca1739f75d7fe736f80f2668b479fc632274011d9ad635b9855c42073ad"} Oct 03 08:52:42 crc kubenswrapper[4899]: I1003 08:52:42.014311 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-mgk7c" event={"ID":"8ec8f47a-3de7-4e04-a612-dbf72a0a21d5","Type":"ContainerStarted","Data":"5679733557bc636e9bc244d65c4181cc9541f1c0e2f4c27647f10d7604a091e4"} Oct 03 08:52:42 crc kubenswrapper[4899]: I1003 08:52:42.015289 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-68d546b9d8-mgk7c" Oct 03 08:52:42 crc kubenswrapper[4899]: I1003 08:52:42.035968 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-68d546b9d8-mgk7c" podStartSLOduration=2.035948334 podStartE2EDuration="2.035948334s" podCreationTimestamp="2025-10-03 08:52:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:52:42.03072041 +0000 UTC m=+736.138205363" watchObservedRunningTime="2025-10-03 08:52:42.035948334 +0000 UTC m=+736.143433287" Oct 03 08:52:42 crc kubenswrapper[4899]: I1003 
08:52:42.155086 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6-memberlist\") pod \"speaker-9jkt6\" (UID: \"94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6\") " pod="metallb-system/speaker-9jkt6" Oct 03 08:52:42 crc kubenswrapper[4899]: I1003 08:52:42.160559 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6-memberlist\") pod \"speaker-9jkt6\" (UID: \"94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6\") " pod="metallb-system/speaker-9jkt6" Oct 03 08:52:42 crc kubenswrapper[4899]: I1003 08:52:42.198090 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 08:52:42 crc kubenswrapper[4899]: I1003 08:52:42.198154 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 08:52:42 crc kubenswrapper[4899]: I1003 08:52:42.212544 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-9jkt6" Oct 03 08:52:43 crc kubenswrapper[4899]: I1003 08:52:43.026467 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-9jkt6" event={"ID":"94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6","Type":"ContainerStarted","Data":"e2729499b9fecb7c0b13aaebd5ec0c956121cc03e50437e54f047e7c220596d6"} Oct 03 08:52:43 crc kubenswrapper[4899]: I1003 08:52:43.026772 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-9jkt6" event={"ID":"94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6","Type":"ContainerStarted","Data":"71e73a5ee15c3b921421e0206b78488c7544b0d08ac5c345ffaf3f3858ebf048"} Oct 03 08:52:43 crc kubenswrapper[4899]: I1003 08:52:43.026785 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-9jkt6" event={"ID":"94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6","Type":"ContainerStarted","Data":"b934bf78b6cc579be801c9783b2145d05fe6699e388d3540c1e2e243f8e13465"} Oct 03 08:52:43 crc kubenswrapper[4899]: I1003 08:52:43.027000 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-9jkt6" Oct 03 08:52:43 crc kubenswrapper[4899]: I1003 08:52:43.047487 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-9jkt6" podStartSLOduration=3.047468112 podStartE2EDuration="3.047468112s" podCreationTimestamp="2025-10-03 08:52:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:52:43.044405897 +0000 UTC m=+737.151890850" watchObservedRunningTime="2025-10-03 08:52:43.047468112 +0000 UTC m=+737.154953065" Oct 03 08:52:48 crc kubenswrapper[4899]: I1003 08:52:48.059484 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-6t985" event={"ID":"d43f846f-0e5f-4bb8-9041-6b471ca7e6df","Type":"ContainerStarted","Data":"74ecdabe883b5468ca99a0e448d633c7e546a0d7d044957e3cb8110695b5e858"} Oct 03 
08:52:48 crc kubenswrapper[4899]: I1003 08:52:48.060072 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-6t985" Oct 03 08:52:48 crc kubenswrapper[4899]: I1003 08:52:48.061912 4899 generic.go:334] "Generic (PLEG): container finished" podID="b1b20a0b-2a5d-472e-8ccd-e4e89d466eed" containerID="839074a6e2db04927d6ca9fe63a9d5d5263ed4a7017fa582358a778811baac11" exitCode=0 Oct 03 08:52:48 crc kubenswrapper[4899]: I1003 08:52:48.061976 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dq8r4" event={"ID":"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed","Type":"ContainerDied","Data":"839074a6e2db04927d6ca9fe63a9d5d5263ed4a7017fa582358a778811baac11"} Oct 03 08:52:48 crc kubenswrapper[4899]: I1003 08:52:48.078825 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-6t985" podStartSLOduration=1.657609028 podStartE2EDuration="8.078787965s" podCreationTimestamp="2025-10-03 08:52:40 +0000 UTC" firstStartedPulling="2025-10-03 08:52:40.916830588 +0000 UTC m=+735.024315541" lastFinishedPulling="2025-10-03 08:52:47.338009525 +0000 UTC m=+741.445494478" observedRunningTime="2025-10-03 08:52:48.076310507 +0000 UTC m=+742.183795480" watchObservedRunningTime="2025-10-03 08:52:48.078787965 +0000 UTC m=+742.186272938" Oct 03 08:52:49 crc kubenswrapper[4899]: I1003 08:52:49.069716 4899 generic.go:334] "Generic (PLEG): container finished" podID="b1b20a0b-2a5d-472e-8ccd-e4e89d466eed" containerID="8c9e7b222d82b4e847f94d1a67473d6d05aa0cca3797a1368e650b0926966f6a" exitCode=0 Oct 03 08:52:49 crc kubenswrapper[4899]: I1003 08:52:49.070983 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dq8r4" event={"ID":"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed","Type":"ContainerDied","Data":"8c9e7b222d82b4e847f94d1a67473d6d05aa0cca3797a1368e650b0926966f6a"} Oct 03 08:52:49 crc kubenswrapper[4899]: I1003 08:52:49.332066 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-hbstw"] Oct 03 08:52:49 crc kubenswrapper[4899]: I1003 08:52:49.332287 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" podUID="19c551e7-757b-4136-a143-6b6aa8152c57" containerName="controller-manager" containerID="cri-o://d07e2c20a6dba85d720c85c3fa6e2cdcfee55f19c58246fb1ba077882cc14bf4" gracePeriod=30 Oct 03 08:52:49 crc kubenswrapper[4899]: I1003 08:52:49.432657 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp"] Oct 03 08:52:49 crc kubenswrapper[4899]: I1003 08:52:49.432912 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp" podUID="a5a7bb2d-e3f4-4c2e-9d78-483724280890" containerName="route-controller-manager" containerID="cri-o://64176c8de8a6ea0ad7fdcdd96914e7b82c8ff8315989d3078a7f611297ab9b47" gracePeriod=30 Oct 03 08:52:49 crc kubenswrapper[4899]: I1003 08:52:49.793543 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" Oct 03 08:52:49 crc kubenswrapper[4899]: I1003 08:52:49.853705 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp" Oct 03 08:52:49 crc kubenswrapper[4899]: I1003 08:52:49.959746 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/19c551e7-757b-4136-a143-6b6aa8152c57-client-ca\") pod \"19c551e7-757b-4136-a143-6b6aa8152c57\" (UID: \"19c551e7-757b-4136-a143-6b6aa8152c57\") " Oct 03 08:52:49 crc kubenswrapper[4899]: I1003 08:52:49.959803 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/19c551e7-757b-4136-a143-6b6aa8152c57-proxy-ca-bundles\") pod \"19c551e7-757b-4136-a143-6b6aa8152c57\" (UID: \"19c551e7-757b-4136-a143-6b6aa8152c57\") " Oct 03 08:52:49 crc kubenswrapper[4899]: I1003 08:52:49.959869 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a5a7bb2d-e3f4-4c2e-9d78-483724280890-client-ca\") pod \"a5a7bb2d-e3f4-4c2e-9d78-483724280890\" (UID: \"a5a7bb2d-e3f4-4c2e-9d78-483724280890\") " Oct 03 08:52:49 crc kubenswrapper[4899]: I1003 08:52:49.959927 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19c551e7-757b-4136-a143-6b6aa8152c57-serving-cert\") pod \"19c551e7-757b-4136-a143-6b6aa8152c57\" (UID: \"19c551e7-757b-4136-a143-6b6aa8152c57\") " Oct 03 08:52:49 crc kubenswrapper[4899]: I1003 08:52:49.959968 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c6t5k\" (UniqueName: \"kubernetes.io/projected/19c551e7-757b-4136-a143-6b6aa8152c57-kube-api-access-c6t5k\") pod \"19c551e7-757b-4136-a143-6b6aa8152c57\" (UID: \"19c551e7-757b-4136-a143-6b6aa8152c57\") " Oct 03 08:52:49 crc kubenswrapper[4899]: I1003 08:52:49.959998 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5a7bb2d-e3f4-4c2e-9d78-483724280890-config\") pod \"a5a7bb2d-e3f4-4c2e-9d78-483724280890\" (UID: \"a5a7bb2d-e3f4-4c2e-9d78-483724280890\") " Oct 03 08:52:49 crc kubenswrapper[4899]: I1003 08:52:49.960023 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zl7zj\" (UniqueName: \"kubernetes.io/projected/a5a7bb2d-e3f4-4c2e-9d78-483724280890-kube-api-access-zl7zj\") pod \"a5a7bb2d-e3f4-4c2e-9d78-483724280890\" (UID: \"a5a7bb2d-e3f4-4c2e-9d78-483724280890\") " Oct 03 08:52:49 crc kubenswrapper[4899]: I1003 08:52:49.960046 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19c551e7-757b-4136-a143-6b6aa8152c57-config\") pod \"19c551e7-757b-4136-a143-6b6aa8152c57\" (UID: \"19c551e7-757b-4136-a143-6b6aa8152c57\") " Oct 03 08:52:49 crc kubenswrapper[4899]: I1003 08:52:49.960082 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a5a7bb2d-e3f4-4c2e-9d78-483724280890-serving-cert\") pod \"a5a7bb2d-e3f4-4c2e-9d78-483724280890\" (UID: \"a5a7bb2d-e3f4-4c2e-9d78-483724280890\") " Oct 03 08:52:49 crc kubenswrapper[4899]: I1003 08:52:49.960491 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19c551e7-757b-4136-a143-6b6aa8152c57-client-ca" (OuterVolumeSpecName: "client-ca") pod "19c551e7-757b-4136-a143-6b6aa8152c57" 
(UID: "19c551e7-757b-4136-a143-6b6aa8152c57"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:52:49 crc kubenswrapper[4899]: I1003 08:52:49.960500 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19c551e7-757b-4136-a143-6b6aa8152c57-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "19c551e7-757b-4136-a143-6b6aa8152c57" (UID: "19c551e7-757b-4136-a143-6b6aa8152c57"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:52:49 crc kubenswrapper[4899]: I1003 08:52:49.960874 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a5a7bb2d-e3f4-4c2e-9d78-483724280890-client-ca" (OuterVolumeSpecName: "client-ca") pod "a5a7bb2d-e3f4-4c2e-9d78-483724280890" (UID: "a5a7bb2d-e3f4-4c2e-9d78-483724280890"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:52:49 crc kubenswrapper[4899]: I1003 08:52:49.961521 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a5a7bb2d-e3f4-4c2e-9d78-483724280890-config" (OuterVolumeSpecName: "config") pod "a5a7bb2d-e3f4-4c2e-9d78-483724280890" (UID: "a5a7bb2d-e3f4-4c2e-9d78-483724280890"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:52:49 crc kubenswrapper[4899]: I1003 08:52:49.961552 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19c551e7-757b-4136-a143-6b6aa8152c57-config" (OuterVolumeSpecName: "config") pod "19c551e7-757b-4136-a143-6b6aa8152c57" (UID: "19c551e7-757b-4136-a143-6b6aa8152c57"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:52:49 crc kubenswrapper[4899]: I1003 08:52:49.966112 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5a7bb2d-e3f4-4c2e-9d78-483724280890-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "a5a7bb2d-e3f4-4c2e-9d78-483724280890" (UID: "a5a7bb2d-e3f4-4c2e-9d78-483724280890"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:52:49 crc kubenswrapper[4899]: I1003 08:52:49.966112 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19c551e7-757b-4136-a143-6b6aa8152c57-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "19c551e7-757b-4136-a143-6b6aa8152c57" (UID: "19c551e7-757b-4136-a143-6b6aa8152c57"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:52:49 crc kubenswrapper[4899]: I1003 08:52:49.966116 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5a7bb2d-e3f4-4c2e-9d78-483724280890-kube-api-access-zl7zj" (OuterVolumeSpecName: "kube-api-access-zl7zj") pod "a5a7bb2d-e3f4-4c2e-9d78-483724280890" (UID: "a5a7bb2d-e3f4-4c2e-9d78-483724280890"). InnerVolumeSpecName "kube-api-access-zl7zj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:52:49 crc kubenswrapper[4899]: I1003 08:52:49.966147 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19c551e7-757b-4136-a143-6b6aa8152c57-kube-api-access-c6t5k" (OuterVolumeSpecName: "kube-api-access-c6t5k") pod "19c551e7-757b-4136-a143-6b6aa8152c57" (UID: "19c551e7-757b-4136-a143-6b6aa8152c57"). 
InnerVolumeSpecName "kube-api-access-c6t5k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.061916 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zl7zj\" (UniqueName: \"kubernetes.io/projected/a5a7bb2d-e3f4-4c2e-9d78-483724280890-kube-api-access-zl7zj\") on node \"crc\" DevicePath \"\"" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.061962 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19c551e7-757b-4136-a143-6b6aa8152c57-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.061973 4899 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a5a7bb2d-e3f4-4c2e-9d78-483724280890-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.061983 4899 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/19c551e7-757b-4136-a143-6b6aa8152c57-client-ca\") on node \"crc\" DevicePath \"\"" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.061991 4899 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/19c551e7-757b-4136-a143-6b6aa8152c57-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.062001 4899 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a5a7bb2d-e3f4-4c2e-9d78-483724280890-client-ca\") on node \"crc\" DevicePath \"\"" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.062011 4899 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19c551e7-757b-4136-a143-6b6aa8152c57-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.062020 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c6t5k\" (UniqueName: \"kubernetes.io/projected/19c551e7-757b-4136-a143-6b6aa8152c57-kube-api-access-c6t5k\") on node \"crc\" DevicePath \"\"" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.062028 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5a7bb2d-e3f4-4c2e-9d78-483724280890-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.075943 4899 generic.go:334] "Generic (PLEG): container finished" podID="a5a7bb2d-e3f4-4c2e-9d78-483724280890" containerID="64176c8de8a6ea0ad7fdcdd96914e7b82c8ff8315989d3078a7f611297ab9b47" exitCode=0 Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.076027 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp" event={"ID":"a5a7bb2d-e3f4-4c2e-9d78-483724280890","Type":"ContainerDied","Data":"64176c8de8a6ea0ad7fdcdd96914e7b82c8ff8315989d3078a7f611297ab9b47"} Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.076064 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp" event={"ID":"a5a7bb2d-e3f4-4c2e-9d78-483724280890","Type":"ContainerDied","Data":"93ebd9ac231eed322b17ee13ea12f84d2c3f127706bfd32e8114c70344a9177e"} Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.076071 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.076102 4899 scope.go:117] "RemoveContainer" containerID="64176c8de8a6ea0ad7fdcdd96914e7b82c8ff8315989d3078a7f611297ab9b47" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.078332 4899 generic.go:334] "Generic (PLEG): container finished" podID="19c551e7-757b-4136-a143-6b6aa8152c57" containerID="d07e2c20a6dba85d720c85c3fa6e2cdcfee55f19c58246fb1ba077882cc14bf4" exitCode=0 Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.078388 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" event={"ID":"19c551e7-757b-4136-a143-6b6aa8152c57","Type":"ContainerDied","Data":"d07e2c20a6dba85d720c85c3fa6e2cdcfee55f19c58246fb1ba077882cc14bf4"} Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.078407 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" event={"ID":"19c551e7-757b-4136-a143-6b6aa8152c57","Type":"ContainerDied","Data":"31c0d64d06e866e28496d6e834c3236334be4fceac51f7b18d2b8e36b34db0d4"} Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.078422 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-hbstw" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.080467 4899 generic.go:334] "Generic (PLEG): container finished" podID="b1b20a0b-2a5d-472e-8ccd-e4e89d466eed" containerID="72c2e3493c62380c3c76fa736c3abbb395e198d8753a55d87b44673e8c00ec04" exitCode=0 Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.080491 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dq8r4" event={"ID":"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed","Type":"ContainerDied","Data":"72c2e3493c62380c3c76fa736c3abbb395e198d8753a55d87b44673e8c00ec04"} Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.107733 4899 scope.go:117] "RemoveContainer" containerID="64176c8de8a6ea0ad7fdcdd96914e7b82c8ff8315989d3078a7f611297ab9b47" Oct 03 08:52:50 crc kubenswrapper[4899]: E1003 08:52:50.108932 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64176c8de8a6ea0ad7fdcdd96914e7b82c8ff8315989d3078a7f611297ab9b47\": container with ID starting with 64176c8de8a6ea0ad7fdcdd96914e7b82c8ff8315989d3078a7f611297ab9b47 not found: ID does not exist" containerID="64176c8de8a6ea0ad7fdcdd96914e7b82c8ff8315989d3078a7f611297ab9b47" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.108975 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64176c8de8a6ea0ad7fdcdd96914e7b82c8ff8315989d3078a7f611297ab9b47"} err="failed to get container status \"64176c8de8a6ea0ad7fdcdd96914e7b82c8ff8315989d3078a7f611297ab9b47\": rpc error: code = NotFound desc = could not find container \"64176c8de8a6ea0ad7fdcdd96914e7b82c8ff8315989d3078a7f611297ab9b47\": container with ID starting with 64176c8de8a6ea0ad7fdcdd96914e7b82c8ff8315989d3078a7f611297ab9b47 not found: ID does not exist" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.109001 4899 scope.go:117] "RemoveContainer" containerID="d07e2c20a6dba85d720c85c3fa6e2cdcfee55f19c58246fb1ba077882cc14bf4" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.123666 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-controller-manager/controller-manager-879f6c89f-hbstw"] Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.128089 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-hbstw"] Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.145072 4899 scope.go:117] "RemoveContainer" containerID="d07e2c20a6dba85d720c85c3fa6e2cdcfee55f19c58246fb1ba077882cc14bf4" Oct 03 08:52:50 crc kubenswrapper[4899]: E1003 08:52:50.145744 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d07e2c20a6dba85d720c85c3fa6e2cdcfee55f19c58246fb1ba077882cc14bf4\": container with ID starting with d07e2c20a6dba85d720c85c3fa6e2cdcfee55f19c58246fb1ba077882cc14bf4 not found: ID does not exist" containerID="d07e2c20a6dba85d720c85c3fa6e2cdcfee55f19c58246fb1ba077882cc14bf4" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.145786 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d07e2c20a6dba85d720c85c3fa6e2cdcfee55f19c58246fb1ba077882cc14bf4"} err="failed to get container status \"d07e2c20a6dba85d720c85c3fa6e2cdcfee55f19c58246fb1ba077882cc14bf4\": rpc error: code = NotFound desc = could not find container \"d07e2c20a6dba85d720c85c3fa6e2cdcfee55f19c58246fb1ba077882cc14bf4\": container with ID starting with d07e2c20a6dba85d720c85c3fa6e2cdcfee55f19c58246fb1ba077882cc14bf4 not found: ID does not exist" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.147789 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp"] Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.151099 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dtsxp"] Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.535776 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19c551e7-757b-4136-a143-6b6aa8152c57" path="/var/lib/kubelet/pods/19c551e7-757b-4136-a143-6b6aa8152c57/volumes" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.536588 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5a7bb2d-e3f4-4c2e-9d78-483724280890" path="/var/lib/kubelet/pods/a5a7bb2d-e3f4-4c2e-9d78-483724280890/volumes" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.960352 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5fb6b97c4b-j2csn"] Oct 03 08:52:50 crc kubenswrapper[4899]: E1003 08:52:50.960605 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19c551e7-757b-4136-a143-6b6aa8152c57" containerName="controller-manager" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.960619 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="19c551e7-757b-4136-a143-6b6aa8152c57" containerName="controller-manager" Oct 03 08:52:50 crc kubenswrapper[4899]: E1003 08:52:50.960629 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5a7bb2d-e3f4-4c2e-9d78-483724280890" containerName="route-controller-manager" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.960636 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5a7bb2d-e3f4-4c2e-9d78-483724280890" containerName="route-controller-manager" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.960799 4899 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="a5a7bb2d-e3f4-4c2e-9d78-483724280890" containerName="route-controller-manager" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.960824 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="19c551e7-757b-4136-a143-6b6aa8152c57" containerName="controller-manager" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.961260 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5fb6b97c4b-j2csn" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.965430 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.965486 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.965498 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.965578 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.965744 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.968500 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.974312 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-744b757ff4-mls6w"] Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.975022 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-744b757ff4-mls6w" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.979121 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.980191 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.980399 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.980513 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.980614 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.980690 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 03 08:52:50 crc kubenswrapper[4899]: I1003 08:52:50.985574 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.005579 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5fb6b97c4b-j2csn"] Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.070079 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-744b757ff4-mls6w"] Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.072608 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwmgd\" (UniqueName: \"kubernetes.io/projected/171e6416-d4cb-4a1a-8536-cae1d70d9961-kube-api-access-xwmgd\") pod \"controller-manager-5fb6b97c4b-j2csn\" (UID: \"171e6416-d4cb-4a1a-8536-cae1d70d9961\") " pod="openshift-controller-manager/controller-manager-5fb6b97c4b-j2csn" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.072684 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/171e6416-d4cb-4a1a-8536-cae1d70d9961-config\") pod \"controller-manager-5fb6b97c4b-j2csn\" (UID: \"171e6416-d4cb-4a1a-8536-cae1d70d9961\") " pod="openshift-controller-manager/controller-manager-5fb6b97c4b-j2csn" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.072711 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/171e6416-d4cb-4a1a-8536-cae1d70d9961-proxy-ca-bundles\") pod \"controller-manager-5fb6b97c4b-j2csn\" (UID: \"171e6416-d4cb-4a1a-8536-cae1d70d9961\") " pod="openshift-controller-manager/controller-manager-5fb6b97c4b-j2csn" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.072742 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/171e6416-d4cb-4a1a-8536-cae1d70d9961-client-ca\") pod \"controller-manager-5fb6b97c4b-j2csn\" (UID: \"171e6416-d4cb-4a1a-8536-cae1d70d9961\") " 
pod="openshift-controller-manager/controller-manager-5fb6b97c4b-j2csn" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.072780 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/02febfb3-43f3-4ad8-be83-ea4c634bdbb5-config\") pod \"route-controller-manager-744b757ff4-mls6w\" (UID: \"02febfb3-43f3-4ad8-be83-ea4c634bdbb5\") " pod="openshift-route-controller-manager/route-controller-manager-744b757ff4-mls6w" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.072807 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvsjn\" (UniqueName: \"kubernetes.io/projected/02febfb3-43f3-4ad8-be83-ea4c634bdbb5-kube-api-access-rvsjn\") pod \"route-controller-manager-744b757ff4-mls6w\" (UID: \"02febfb3-43f3-4ad8-be83-ea4c634bdbb5\") " pod="openshift-route-controller-manager/route-controller-manager-744b757ff4-mls6w" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.072833 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/171e6416-d4cb-4a1a-8536-cae1d70d9961-serving-cert\") pod \"controller-manager-5fb6b97c4b-j2csn\" (UID: \"171e6416-d4cb-4a1a-8536-cae1d70d9961\") " pod="openshift-controller-manager/controller-manager-5fb6b97c4b-j2csn" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.072867 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/02febfb3-43f3-4ad8-be83-ea4c634bdbb5-serving-cert\") pod \"route-controller-manager-744b757ff4-mls6w\" (UID: \"02febfb3-43f3-4ad8-be83-ea4c634bdbb5\") " pod="openshift-route-controller-manager/route-controller-manager-744b757ff4-mls6w" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.072915 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/02febfb3-43f3-4ad8-be83-ea4c634bdbb5-client-ca\") pod \"route-controller-manager-744b757ff4-mls6w\" (UID: \"02febfb3-43f3-4ad8-be83-ea4c634bdbb5\") " pod="openshift-route-controller-manager/route-controller-manager-744b757ff4-mls6w" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.092465 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dq8r4" event={"ID":"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed","Type":"ContainerStarted","Data":"f5591e4170430bc7b9905fae27cc006139945344622a5d24147e56c98e9e3132"} Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.092515 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dq8r4" event={"ID":"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed","Type":"ContainerStarted","Data":"e418b037a20c60858ec2d91335554c09d88eb1a98e2878a5ee4f4396ecdd58e5"} Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.092528 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dq8r4" event={"ID":"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed","Type":"ContainerStarted","Data":"d1042e2cbc1a6cb4b16129607887e73a20562ec064bd11a37fa0f8462bbc816e"} Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.092540 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dq8r4" event={"ID":"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed","Type":"ContainerStarted","Data":"a06c3a2bdf2afe5d54504000ee11ff5f6b2638b4239ba22f3040c4719d5c89f9"} Oct 03 
08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.092550 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dq8r4" event={"ID":"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed","Type":"ContainerStarted","Data":"d65290c27b709a6dc646691216fa363edd9fb5d735ea99d4980cb7e806169a0f"} Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.174064 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/02febfb3-43f3-4ad8-be83-ea4c634bdbb5-client-ca\") pod \"route-controller-manager-744b757ff4-mls6w\" (UID: \"02febfb3-43f3-4ad8-be83-ea4c634bdbb5\") " pod="openshift-route-controller-manager/route-controller-manager-744b757ff4-mls6w" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.174123 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwmgd\" (UniqueName: \"kubernetes.io/projected/171e6416-d4cb-4a1a-8536-cae1d70d9961-kube-api-access-xwmgd\") pod \"controller-manager-5fb6b97c4b-j2csn\" (UID: \"171e6416-d4cb-4a1a-8536-cae1d70d9961\") " pod="openshift-controller-manager/controller-manager-5fb6b97c4b-j2csn" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.174171 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/171e6416-d4cb-4a1a-8536-cae1d70d9961-config\") pod \"controller-manager-5fb6b97c4b-j2csn\" (UID: \"171e6416-d4cb-4a1a-8536-cae1d70d9961\") " pod="openshift-controller-manager/controller-manager-5fb6b97c4b-j2csn" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.174191 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/171e6416-d4cb-4a1a-8536-cae1d70d9961-proxy-ca-bundles\") pod \"controller-manager-5fb6b97c4b-j2csn\" (UID: \"171e6416-d4cb-4a1a-8536-cae1d70d9961\") " pod="openshift-controller-manager/controller-manager-5fb6b97c4b-j2csn" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.174222 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/171e6416-d4cb-4a1a-8536-cae1d70d9961-client-ca\") pod \"controller-manager-5fb6b97c4b-j2csn\" (UID: \"171e6416-d4cb-4a1a-8536-cae1d70d9961\") " pod="openshift-controller-manager/controller-manager-5fb6b97c4b-j2csn" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.174265 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/02febfb3-43f3-4ad8-be83-ea4c634bdbb5-config\") pod \"route-controller-manager-744b757ff4-mls6w\" (UID: \"02febfb3-43f3-4ad8-be83-ea4c634bdbb5\") " pod="openshift-route-controller-manager/route-controller-manager-744b757ff4-mls6w" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.174291 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvsjn\" (UniqueName: \"kubernetes.io/projected/02febfb3-43f3-4ad8-be83-ea4c634bdbb5-kube-api-access-rvsjn\") pod \"route-controller-manager-744b757ff4-mls6w\" (UID: \"02febfb3-43f3-4ad8-be83-ea4c634bdbb5\") " pod="openshift-route-controller-manager/route-controller-manager-744b757ff4-mls6w" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.174318 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/171e6416-d4cb-4a1a-8536-cae1d70d9961-serving-cert\") pod 
\"controller-manager-5fb6b97c4b-j2csn\" (UID: \"171e6416-d4cb-4a1a-8536-cae1d70d9961\") " pod="openshift-controller-manager/controller-manager-5fb6b97c4b-j2csn" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.174404 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/02febfb3-43f3-4ad8-be83-ea4c634bdbb5-serving-cert\") pod \"route-controller-manager-744b757ff4-mls6w\" (UID: \"02febfb3-43f3-4ad8-be83-ea4c634bdbb5\") " pod="openshift-route-controller-manager/route-controller-manager-744b757ff4-mls6w" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.175718 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/171e6416-d4cb-4a1a-8536-cae1d70d9961-config\") pod \"controller-manager-5fb6b97c4b-j2csn\" (UID: \"171e6416-d4cb-4a1a-8536-cae1d70d9961\") " pod="openshift-controller-manager/controller-manager-5fb6b97c4b-j2csn" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.175738 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/02febfb3-43f3-4ad8-be83-ea4c634bdbb5-client-ca\") pod \"route-controller-manager-744b757ff4-mls6w\" (UID: \"02febfb3-43f3-4ad8-be83-ea4c634bdbb5\") " pod="openshift-route-controller-manager/route-controller-manager-744b757ff4-mls6w" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.175914 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/171e6416-d4cb-4a1a-8536-cae1d70d9961-client-ca\") pod \"controller-manager-5fb6b97c4b-j2csn\" (UID: \"171e6416-d4cb-4a1a-8536-cae1d70d9961\") " pod="openshift-controller-manager/controller-manager-5fb6b97c4b-j2csn" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.176367 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/02febfb3-43f3-4ad8-be83-ea4c634bdbb5-config\") pod \"route-controller-manager-744b757ff4-mls6w\" (UID: \"02febfb3-43f3-4ad8-be83-ea4c634bdbb5\") " pod="openshift-route-controller-manager/route-controller-manager-744b757ff4-mls6w" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.176525 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/171e6416-d4cb-4a1a-8536-cae1d70d9961-proxy-ca-bundles\") pod \"controller-manager-5fb6b97c4b-j2csn\" (UID: \"171e6416-d4cb-4a1a-8536-cae1d70d9961\") " pod="openshift-controller-manager/controller-manager-5fb6b97c4b-j2csn" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.179427 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/171e6416-d4cb-4a1a-8536-cae1d70d9961-serving-cert\") pod \"controller-manager-5fb6b97c4b-j2csn\" (UID: \"171e6416-d4cb-4a1a-8536-cae1d70d9961\") " pod="openshift-controller-manager/controller-manager-5fb6b97c4b-j2csn" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.188757 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/02febfb3-43f3-4ad8-be83-ea4c634bdbb5-serving-cert\") pod \"route-controller-manager-744b757ff4-mls6w\" (UID: \"02febfb3-43f3-4ad8-be83-ea4c634bdbb5\") " pod="openshift-route-controller-manager/route-controller-manager-744b757ff4-mls6w" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.191614 4899 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvsjn\" (UniqueName: \"kubernetes.io/projected/02febfb3-43f3-4ad8-be83-ea4c634bdbb5-kube-api-access-rvsjn\") pod \"route-controller-manager-744b757ff4-mls6w\" (UID: \"02febfb3-43f3-4ad8-be83-ea4c634bdbb5\") " pod="openshift-route-controller-manager/route-controller-manager-744b757ff4-mls6w" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.192883 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwmgd\" (UniqueName: \"kubernetes.io/projected/171e6416-d4cb-4a1a-8536-cae1d70d9961-kube-api-access-xwmgd\") pod \"controller-manager-5fb6b97c4b-j2csn\" (UID: \"171e6416-d4cb-4a1a-8536-cae1d70d9961\") " pod="openshift-controller-manager/controller-manager-5fb6b97c4b-j2csn" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.296286 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5fb6b97c4b-j2csn" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.322588 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-744b757ff4-mls6w" Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.548217 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-744b757ff4-mls6w"] Oct 03 08:52:51 crc kubenswrapper[4899]: W1003 08:52:51.555576 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod02febfb3_43f3_4ad8_be83_ea4c634bdbb5.slice/crio-a2fd3e9d7954ed9960f020221b272404ec05b5ec1e6fa6e737959c069d83e614 WatchSource:0}: Error finding container a2fd3e9d7954ed9960f020221b272404ec05b5ec1e6fa6e737959c069d83e614: Status 404 returned error can't find the container with id a2fd3e9d7954ed9960f020221b272404ec05b5ec1e6fa6e737959c069d83e614 Oct 03 08:52:51 crc kubenswrapper[4899]: I1003 08:52:51.717172 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5fb6b97c4b-j2csn"] Oct 03 08:52:51 crc kubenswrapper[4899]: W1003 08:52:51.725023 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod171e6416_d4cb_4a1a_8536_cae1d70d9961.slice/crio-5c057c005f0400e932788868c4842ef2137c2f80740b7f08b9f036a71f44ae0b WatchSource:0}: Error finding container 5c057c005f0400e932788868c4842ef2137c2f80740b7f08b9f036a71f44ae0b: Status 404 returned error can't find the container with id 5c057c005f0400e932788868c4842ef2137c2f80740b7f08b9f036a71f44ae0b Oct 03 08:52:52 crc kubenswrapper[4899]: I1003 08:52:52.100539 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-744b757ff4-mls6w" event={"ID":"02febfb3-43f3-4ad8-be83-ea4c634bdbb5","Type":"ContainerStarted","Data":"2571d2aa5eb6a4961224c1b70e5aff8436bb2700bef01b4e6e25a0803480804a"} Oct 03 08:52:52 crc kubenswrapper[4899]: I1003 08:52:52.100589 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-744b757ff4-mls6w" event={"ID":"02febfb3-43f3-4ad8-be83-ea4c634bdbb5","Type":"ContainerStarted","Data":"a2fd3e9d7954ed9960f020221b272404ec05b5ec1e6fa6e737959c069d83e614"} Oct 03 08:52:52 crc kubenswrapper[4899]: I1003 08:52:52.101688 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-route-controller-manager/route-controller-manager-744b757ff4-mls6w" Oct 03 08:52:52 crc kubenswrapper[4899]: I1003 08:52:52.104746 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5fb6b97c4b-j2csn" event={"ID":"171e6416-d4cb-4a1a-8536-cae1d70d9961","Type":"ContainerStarted","Data":"a64cde6aa50318df5f1feff5dd114f9e3fb6df85fcf502c4372676d79ec8b5b2"} Oct 03 08:52:52 crc kubenswrapper[4899]: I1003 08:52:52.104776 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5fb6b97c4b-j2csn" event={"ID":"171e6416-d4cb-4a1a-8536-cae1d70d9961","Type":"ContainerStarted","Data":"5c057c005f0400e932788868c4842ef2137c2f80740b7f08b9f036a71f44ae0b"} Oct 03 08:52:52 crc kubenswrapper[4899]: I1003 08:52:52.105472 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5fb6b97c4b-j2csn" Oct 03 08:52:52 crc kubenswrapper[4899]: I1003 08:52:52.110256 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5fb6b97c4b-j2csn" Oct 03 08:52:52 crc kubenswrapper[4899]: I1003 08:52:52.111382 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dq8r4" event={"ID":"b1b20a0b-2a5d-472e-8ccd-e4e89d466eed","Type":"ContainerStarted","Data":"cd99c38b69e78319f72175db4d55badbb87193b721eff4499f85e2925edb4ee1"} Oct 03 08:52:52 crc kubenswrapper[4899]: I1003 08:52:52.111507 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-dq8r4" Oct 03 08:52:52 crc kubenswrapper[4899]: I1003 08:52:52.129006 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-744b757ff4-mls6w" Oct 03 08:52:52 crc kubenswrapper[4899]: I1003 08:52:52.134267 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-744b757ff4-mls6w" podStartSLOduration=3.134245574 podStartE2EDuration="3.134245574s" podCreationTimestamp="2025-10-03 08:52:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:52:52.122914618 +0000 UTC m=+746.230399571" watchObservedRunningTime="2025-10-03 08:52:52.134245574 +0000 UTC m=+746.241730527" Oct 03 08:52:52 crc kubenswrapper[4899]: I1003 08:52:52.159784 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-dq8r4" podStartSLOduration=6.131384443 podStartE2EDuration="12.159759958s" podCreationTimestamp="2025-10-03 08:52:40 +0000 UTC" firstStartedPulling="2025-10-03 08:52:41.325636903 +0000 UTC m=+735.433121856" lastFinishedPulling="2025-10-03 08:52:47.354012418 +0000 UTC m=+741.461497371" observedRunningTime="2025-10-03 08:52:52.15951904 +0000 UTC m=+746.267004003" watchObservedRunningTime="2025-10-03 08:52:52.159759958 +0000 UTC m=+746.267244911" Oct 03 08:52:52 crc kubenswrapper[4899]: I1003 08:52:52.184493 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5fb6b97c4b-j2csn" podStartSLOduration=3.184474156 podStartE2EDuration="3.184474156s" podCreationTimestamp="2025-10-03 08:52:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 
08:52:52.181301146 +0000 UTC m=+746.288786109" watchObservedRunningTime="2025-10-03 08:52:52.184474156 +0000 UTC m=+746.291959109" Oct 03 08:52:52 crc kubenswrapper[4899]: I1003 08:52:52.230353 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-9jkt6" Oct 03 08:52:55 crc kubenswrapper[4899]: I1003 08:52:55.168220 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-2dfdl"] Oct 03 08:52:55 crc kubenswrapper[4899]: I1003 08:52:55.169582 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-2dfdl" Oct 03 08:52:55 crc kubenswrapper[4899]: I1003 08:52:55.171368 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Oct 03 08:52:55 crc kubenswrapper[4899]: I1003 08:52:55.186282 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-2dfdl"] Oct 03 08:52:55 crc kubenswrapper[4899]: I1003 08:52:55.193447 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Oct 03 08:52:55 crc kubenswrapper[4899]: I1003 08:52:55.335192 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bl87n\" (UniqueName: \"kubernetes.io/projected/61a9b5cb-28b0-4e6e-af61-4d785713b40e-kube-api-access-bl87n\") pod \"openstack-operator-index-2dfdl\" (UID: \"61a9b5cb-28b0-4e6e-af61-4d785713b40e\") " pod="openstack-operators/openstack-operator-index-2dfdl" Oct 03 08:52:55 crc kubenswrapper[4899]: I1003 08:52:55.436261 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bl87n\" (UniqueName: \"kubernetes.io/projected/61a9b5cb-28b0-4e6e-af61-4d785713b40e-kube-api-access-bl87n\") pod \"openstack-operator-index-2dfdl\" (UID: \"61a9b5cb-28b0-4e6e-af61-4d785713b40e\") " pod="openstack-operators/openstack-operator-index-2dfdl" Oct 03 08:52:55 crc kubenswrapper[4899]: I1003 08:52:55.463506 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bl87n\" (UniqueName: \"kubernetes.io/projected/61a9b5cb-28b0-4e6e-af61-4d785713b40e-kube-api-access-bl87n\") pod \"openstack-operator-index-2dfdl\" (UID: \"61a9b5cb-28b0-4e6e-af61-4d785713b40e\") " pod="openstack-operators/openstack-operator-index-2dfdl" Oct 03 08:52:55 crc kubenswrapper[4899]: I1003 08:52:55.494710 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-2dfdl" Oct 03 08:52:55 crc kubenswrapper[4899]: I1003 08:52:55.914129 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-2dfdl"] Oct 03 08:52:56 crc kubenswrapper[4899]: I1003 08:52:56.136042 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2dfdl" event={"ID":"61a9b5cb-28b0-4e6e-af61-4d785713b40e","Type":"ContainerStarted","Data":"f03317ff35ef5343f530aeb472bdad770eece5e3a2c4fa7111e00da0c58d9a47"} Oct 03 08:52:56 crc kubenswrapper[4899]: I1003 08:52:56.234945 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-dq8r4" Oct 03 08:52:56 crc kubenswrapper[4899]: I1003 08:52:56.282117 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-dq8r4" Oct 03 08:52:58 crc kubenswrapper[4899]: I1003 08:52:58.555261 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-2dfdl"] Oct 03 08:52:59 crc kubenswrapper[4899]: I1003 08:52:59.155144 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2dfdl" event={"ID":"61a9b5cb-28b0-4e6e-af61-4d785713b40e","Type":"ContainerStarted","Data":"05f2527936954d1f5da82de85025b887377064cb236e7cc606cd6c69039ab405"} Oct 03 08:52:59 crc kubenswrapper[4899]: I1003 08:52:59.155260 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-2dfdl" podUID="61a9b5cb-28b0-4e6e-af61-4d785713b40e" containerName="registry-server" containerID="cri-o://05f2527936954d1f5da82de85025b887377064cb236e7cc606cd6c69039ab405" gracePeriod=2 Oct 03 08:52:59 crc kubenswrapper[4899]: I1003 08:52:59.161104 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-cq96k"] Oct 03 08:52:59 crc kubenswrapper[4899]: I1003 08:52:59.162172 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-cq96k" Oct 03 08:52:59 crc kubenswrapper[4899]: I1003 08:52:59.167358 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-cq96k"] Oct 03 08:52:59 crc kubenswrapper[4899]: I1003 08:52:59.168411 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-qvt66" Oct 03 08:52:59 crc kubenswrapper[4899]: I1003 08:52:59.181662 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-2dfdl" podStartSLOduration=2.313907229 podStartE2EDuration="4.181643852s" podCreationTimestamp="2025-10-03 08:52:55 +0000 UTC" firstStartedPulling="2025-10-03 08:52:55.920739263 +0000 UTC m=+750.028224216" lastFinishedPulling="2025-10-03 08:52:57.788475886 +0000 UTC m=+751.895960839" observedRunningTime="2025-10-03 08:52:59.175591742 +0000 UTC m=+753.283076695" watchObservedRunningTime="2025-10-03 08:52:59.181643852 +0000 UTC m=+753.289128805" Oct 03 08:52:59 crc kubenswrapper[4899]: I1003 08:52:59.287147 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vgrt\" (UniqueName: \"kubernetes.io/projected/3e217d24-f3ae-48f2-87bb-d9b735659f5d-kube-api-access-9vgrt\") pod \"openstack-operator-index-cq96k\" (UID: \"3e217d24-f3ae-48f2-87bb-d9b735659f5d\") " pod="openstack-operators/openstack-operator-index-cq96k" Oct 03 08:52:59 crc kubenswrapper[4899]: I1003 08:52:59.388293 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vgrt\" (UniqueName: \"kubernetes.io/projected/3e217d24-f3ae-48f2-87bb-d9b735659f5d-kube-api-access-9vgrt\") pod \"openstack-operator-index-cq96k\" (UID: \"3e217d24-f3ae-48f2-87bb-d9b735659f5d\") " pod="openstack-operators/openstack-operator-index-cq96k" Oct 03 08:52:59 crc kubenswrapper[4899]: I1003 08:52:59.407083 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vgrt\" (UniqueName: \"kubernetes.io/projected/3e217d24-f3ae-48f2-87bb-d9b735659f5d-kube-api-access-9vgrt\") pod \"openstack-operator-index-cq96k\" (UID: \"3e217d24-f3ae-48f2-87bb-d9b735659f5d\") " pod="openstack-operators/openstack-operator-index-cq96k" Oct 03 08:52:59 crc kubenswrapper[4899]: I1003 08:52:59.534498 4899 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 03 08:52:59 crc kubenswrapper[4899]: I1003 08:52:59.554534 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-cq96k" Oct 03 08:52:59 crc kubenswrapper[4899]: I1003 08:52:59.635969 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-2dfdl" Oct 03 08:52:59 crc kubenswrapper[4899]: I1003 08:52:59.795409 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bl87n\" (UniqueName: \"kubernetes.io/projected/61a9b5cb-28b0-4e6e-af61-4d785713b40e-kube-api-access-bl87n\") pod \"61a9b5cb-28b0-4e6e-af61-4d785713b40e\" (UID: \"61a9b5cb-28b0-4e6e-af61-4d785713b40e\") " Oct 03 08:52:59 crc kubenswrapper[4899]: I1003 08:52:59.801019 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61a9b5cb-28b0-4e6e-af61-4d785713b40e-kube-api-access-bl87n" (OuterVolumeSpecName: "kube-api-access-bl87n") pod "61a9b5cb-28b0-4e6e-af61-4d785713b40e" (UID: "61a9b5cb-28b0-4e6e-af61-4d785713b40e"). InnerVolumeSpecName "kube-api-access-bl87n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:52:59 crc kubenswrapper[4899]: I1003 08:52:59.896938 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bl87n\" (UniqueName: \"kubernetes.io/projected/61a9b5cb-28b0-4e6e-af61-4d785713b40e-kube-api-access-bl87n\") on node \"crc\" DevicePath \"\"" Oct 03 08:52:59 crc kubenswrapper[4899]: I1003 08:52:59.970408 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-cq96k"] Oct 03 08:52:59 crc kubenswrapper[4899]: W1003 08:52:59.977908 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3e217d24_f3ae_48f2_87bb_d9b735659f5d.slice/crio-84584f038ba74766a2d51b3baa849eef8568758e178931593074481a71cfca69 WatchSource:0}: Error finding container 84584f038ba74766a2d51b3baa849eef8568758e178931593074481a71cfca69: Status 404 returned error can't find the container with id 84584f038ba74766a2d51b3baa849eef8568758e178931593074481a71cfca69 Oct 03 08:53:00 crc kubenswrapper[4899]: I1003 08:53:00.162418 4899 generic.go:334] "Generic (PLEG): container finished" podID="61a9b5cb-28b0-4e6e-af61-4d785713b40e" containerID="05f2527936954d1f5da82de85025b887377064cb236e7cc606cd6c69039ab405" exitCode=0 Oct 03 08:53:00 crc kubenswrapper[4899]: I1003 08:53:00.162484 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2dfdl" event={"ID":"61a9b5cb-28b0-4e6e-af61-4d785713b40e","Type":"ContainerDied","Data":"05f2527936954d1f5da82de85025b887377064cb236e7cc606cd6c69039ab405"} Oct 03 08:53:00 crc kubenswrapper[4899]: I1003 08:53:00.162494 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-2dfdl" Oct 03 08:53:00 crc kubenswrapper[4899]: I1003 08:53:00.162509 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2dfdl" event={"ID":"61a9b5cb-28b0-4e6e-af61-4d785713b40e","Type":"ContainerDied","Data":"f03317ff35ef5343f530aeb472bdad770eece5e3a2c4fa7111e00da0c58d9a47"} Oct 03 08:53:00 crc kubenswrapper[4899]: I1003 08:53:00.162568 4899 scope.go:117] "RemoveContainer" containerID="05f2527936954d1f5da82de85025b887377064cb236e7cc606cd6c69039ab405" Oct 03 08:53:00 crc kubenswrapper[4899]: I1003 08:53:00.164243 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-cq96k" event={"ID":"3e217d24-f3ae-48f2-87bb-d9b735659f5d","Type":"ContainerStarted","Data":"e192e746685ae61c55ac5bce8bc4bd0b933a8de8177e7d16e360028e708186db"} Oct 03 08:53:00 crc kubenswrapper[4899]: I1003 08:53:00.164267 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-cq96k" event={"ID":"3e217d24-f3ae-48f2-87bb-d9b735659f5d","Type":"ContainerStarted","Data":"84584f038ba74766a2d51b3baa849eef8568758e178931593074481a71cfca69"} Oct 03 08:53:00 crc kubenswrapper[4899]: I1003 08:53:00.176016 4899 scope.go:117] "RemoveContainer" containerID="05f2527936954d1f5da82de85025b887377064cb236e7cc606cd6c69039ab405" Oct 03 08:53:00 crc kubenswrapper[4899]: E1003 08:53:00.177373 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05f2527936954d1f5da82de85025b887377064cb236e7cc606cd6c69039ab405\": container with ID starting with 05f2527936954d1f5da82de85025b887377064cb236e7cc606cd6c69039ab405 not found: ID does not exist" containerID="05f2527936954d1f5da82de85025b887377064cb236e7cc606cd6c69039ab405" Oct 03 08:53:00 crc kubenswrapper[4899]: I1003 08:53:00.177406 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05f2527936954d1f5da82de85025b887377064cb236e7cc606cd6c69039ab405"} err="failed to get container status \"05f2527936954d1f5da82de85025b887377064cb236e7cc606cd6c69039ab405\": rpc error: code = NotFound desc = could not find container \"05f2527936954d1f5da82de85025b887377064cb236e7cc606cd6c69039ab405\": container with ID starting with 05f2527936954d1f5da82de85025b887377064cb236e7cc606cd6c69039ab405 not found: ID does not exist" Oct 03 08:53:00 crc kubenswrapper[4899]: I1003 08:53:00.184141 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-cq96k" podStartSLOduration=1.140230733 podStartE2EDuration="1.184123255s" podCreationTimestamp="2025-10-03 08:52:59 +0000 UTC" firstStartedPulling="2025-10-03 08:52:59.981309327 +0000 UTC m=+754.088794280" lastFinishedPulling="2025-10-03 08:53:00.025201849 +0000 UTC m=+754.132686802" observedRunningTime="2025-10-03 08:53:00.18236774 +0000 UTC m=+754.289852693" watchObservedRunningTime="2025-10-03 08:53:00.184123255 +0000 UTC m=+754.291608198" Oct 03 08:53:00 crc kubenswrapper[4899]: I1003 08:53:00.196206 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-2dfdl"] Oct 03 08:53:00 crc kubenswrapper[4899]: I1003 08:53:00.199494 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-2dfdl"] Oct 03 08:53:00 crc kubenswrapper[4899]: I1003 08:53:00.535297 4899 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="61a9b5cb-28b0-4e6e-af61-4d785713b40e" path="/var/lib/kubelet/pods/61a9b5cb-28b0-4e6e-af61-4d785713b40e/volumes" Oct 03 08:53:00 crc kubenswrapper[4899]: I1003 08:53:00.632406 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-6t985" Oct 03 08:53:00 crc kubenswrapper[4899]: I1003 08:53:00.739387 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-68d546b9d8-mgk7c" Oct 03 08:53:01 crc kubenswrapper[4899]: I1003 08:53:01.236169 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-dq8r4" Oct 03 08:53:09 crc kubenswrapper[4899]: I1003 08:53:09.555324 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-cq96k" Oct 03 08:53:09 crc kubenswrapper[4899]: I1003 08:53:09.557935 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-cq96k" Oct 03 08:53:09 crc kubenswrapper[4899]: I1003 08:53:09.586726 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-cq96k" Oct 03 08:53:10 crc kubenswrapper[4899]: I1003 08:53:10.241756 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-cq96k" Oct 03 08:53:11 crc kubenswrapper[4899]: I1003 08:53:11.985525 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d"] Oct 03 08:53:11 crc kubenswrapper[4899]: E1003 08:53:11.986071 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61a9b5cb-28b0-4e6e-af61-4d785713b40e" containerName="registry-server" Oct 03 08:53:11 crc kubenswrapper[4899]: I1003 08:53:11.986089 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="61a9b5cb-28b0-4e6e-af61-4d785713b40e" containerName="registry-server" Oct 03 08:53:11 crc kubenswrapper[4899]: I1003 08:53:11.986222 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="61a9b5cb-28b0-4e6e-af61-4d785713b40e" containerName="registry-server" Oct 03 08:53:11 crc kubenswrapper[4899]: I1003 08:53:11.987000 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d" Oct 03 08:53:11 crc kubenswrapper[4899]: I1003 08:53:11.989083 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-j7zkl" Oct 03 08:53:11 crc kubenswrapper[4899]: I1003 08:53:11.997454 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d"] Oct 03 08:53:12 crc kubenswrapper[4899]: I1003 08:53:12.063042 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4n2w2\" (UniqueName: \"kubernetes.io/projected/f5e06749-553d-438a-b1be-4db08df71d67-kube-api-access-4n2w2\") pod \"1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d\" (UID: \"f5e06749-553d-438a-b1be-4db08df71d67\") " pod="openstack-operators/1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d" Oct 03 08:53:12 crc kubenswrapper[4899]: I1003 08:53:12.063087 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f5e06749-553d-438a-b1be-4db08df71d67-bundle\") pod \"1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d\" (UID: \"f5e06749-553d-438a-b1be-4db08df71d67\") " pod="openstack-operators/1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d" Oct 03 08:53:12 crc kubenswrapper[4899]: I1003 08:53:12.063264 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f5e06749-553d-438a-b1be-4db08df71d67-util\") pod \"1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d\" (UID: \"f5e06749-553d-438a-b1be-4db08df71d67\") " pod="openstack-operators/1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d" Oct 03 08:53:12 crc kubenswrapper[4899]: I1003 08:53:12.164727 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f5e06749-553d-438a-b1be-4db08df71d67-util\") pod \"1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d\" (UID: \"f5e06749-553d-438a-b1be-4db08df71d67\") " pod="openstack-operators/1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d" Oct 03 08:53:12 crc kubenswrapper[4899]: I1003 08:53:12.165152 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4n2w2\" (UniqueName: \"kubernetes.io/projected/f5e06749-553d-438a-b1be-4db08df71d67-kube-api-access-4n2w2\") pod \"1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d\" (UID: \"f5e06749-553d-438a-b1be-4db08df71d67\") " pod="openstack-operators/1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d" Oct 03 08:53:12 crc kubenswrapper[4899]: I1003 08:53:12.165251 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f5e06749-553d-438a-b1be-4db08df71d67-bundle\") pod \"1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d\" (UID: \"f5e06749-553d-438a-b1be-4db08df71d67\") " pod="openstack-operators/1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d" Oct 03 08:53:12 crc kubenswrapper[4899]: I1003 08:53:12.165569 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/f5e06749-553d-438a-b1be-4db08df71d67-util\") pod \"1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d\" (UID: \"f5e06749-553d-438a-b1be-4db08df71d67\") " pod="openstack-operators/1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d" Oct 03 08:53:12 crc kubenswrapper[4899]: I1003 08:53:12.165695 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f5e06749-553d-438a-b1be-4db08df71d67-bundle\") pod \"1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d\" (UID: \"f5e06749-553d-438a-b1be-4db08df71d67\") " pod="openstack-operators/1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d" Oct 03 08:53:12 crc kubenswrapper[4899]: I1003 08:53:12.198035 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4n2w2\" (UniqueName: \"kubernetes.io/projected/f5e06749-553d-438a-b1be-4db08df71d67-kube-api-access-4n2w2\") pod \"1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d\" (UID: \"f5e06749-553d-438a-b1be-4db08df71d67\") " pod="openstack-operators/1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d" Oct 03 08:53:12 crc kubenswrapper[4899]: I1003 08:53:12.198740 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 08:53:12 crc kubenswrapper[4899]: I1003 08:53:12.198782 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 08:53:12 crc kubenswrapper[4899]: I1003 08:53:12.198826 4899 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 08:53:12 crc kubenswrapper[4899]: I1003 08:53:12.199449 4899 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0b65c263cdff33924e77f87b039bd41bb8cbe3269904b715ae19db8a65d88bf8"} pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 08:53:12 crc kubenswrapper[4899]: I1003 08:53:12.199516 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" containerID="cri-o://0b65c263cdff33924e77f87b039bd41bb8cbe3269904b715ae19db8a65d88bf8" gracePeriod=600 Oct 03 08:53:12 crc kubenswrapper[4899]: I1003 08:53:12.307363 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d" Oct 03 08:53:12 crc kubenswrapper[4899]: I1003 08:53:12.681711 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d"] Oct 03 08:53:12 crc kubenswrapper[4899]: W1003 08:53:12.683035 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf5e06749_553d_438a_b1be_4db08df71d67.slice/crio-2cff2abdd372bfc63925f08a96c3dcb6c2d9f41c4204d811b32acbabd04c82a8 WatchSource:0}: Error finding container 2cff2abdd372bfc63925f08a96c3dcb6c2d9f41c4204d811b32acbabd04c82a8: Status 404 returned error can't find the container with id 2cff2abdd372bfc63925f08a96c3dcb6c2d9f41c4204d811b32acbabd04c82a8 Oct 03 08:53:13 crc kubenswrapper[4899]: I1003 08:53:13.238532 4899 generic.go:334] "Generic (PLEG): container finished" podID="f5e06749-553d-438a-b1be-4db08df71d67" containerID="1a70608d6eeba323e38472b3e61c22d006e19fffd3c5b86a01e63bd976625e1b" exitCode=0 Oct 03 08:53:13 crc kubenswrapper[4899]: I1003 08:53:13.238606 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d" event={"ID":"f5e06749-553d-438a-b1be-4db08df71d67","Type":"ContainerDied","Data":"1a70608d6eeba323e38472b3e61c22d006e19fffd3c5b86a01e63bd976625e1b"} Oct 03 08:53:13 crc kubenswrapper[4899]: I1003 08:53:13.238912 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d" event={"ID":"f5e06749-553d-438a-b1be-4db08df71d67","Type":"ContainerStarted","Data":"2cff2abdd372bfc63925f08a96c3dcb6c2d9f41c4204d811b32acbabd04c82a8"} Oct 03 08:53:13 crc kubenswrapper[4899]: I1003 08:53:13.244309 4899 generic.go:334] "Generic (PLEG): container finished" podID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerID="0b65c263cdff33924e77f87b039bd41bb8cbe3269904b715ae19db8a65d88bf8" exitCode=0 Oct 03 08:53:13 crc kubenswrapper[4899]: I1003 08:53:13.244361 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerDied","Data":"0b65c263cdff33924e77f87b039bd41bb8cbe3269904b715ae19db8a65d88bf8"} Oct 03 08:53:13 crc kubenswrapper[4899]: I1003 08:53:13.244429 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerStarted","Data":"56a8af8272c5f0a0da6c3696ab9575cf28a2d27b7f650888f1dd37ac24669078"} Oct 03 08:53:13 crc kubenswrapper[4899]: I1003 08:53:13.244452 4899 scope.go:117] "RemoveContainer" containerID="6317475fe02e5878e0a0ad7c510b598f3249a496df5a7826416b0c9a1c2435dc" Oct 03 08:53:14 crc kubenswrapper[4899]: I1003 08:53:14.252048 4899 generic.go:334] "Generic (PLEG): container finished" podID="f5e06749-553d-438a-b1be-4db08df71d67" containerID="4f0ff52888491cf42a15699e860297a83b83421adc73708be6f4b42e4a6d99fc" exitCode=0 Oct 03 08:53:14 crc kubenswrapper[4899]: I1003 08:53:14.252125 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d" 
event={"ID":"f5e06749-553d-438a-b1be-4db08df71d67","Type":"ContainerDied","Data":"4f0ff52888491cf42a15699e860297a83b83421adc73708be6f4b42e4a6d99fc"} Oct 03 08:53:14 crc kubenswrapper[4899]: E1003 08:53:14.559710 4899 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf5e06749_553d_438a_b1be_4db08df71d67.slice/crio-conmon-6b1827e746982559fca249fa78bd0118d59122f0667a4b1a1430fdfaab9475fc.scope\": RecentStats: unable to find data in memory cache]" Oct 03 08:53:15 crc kubenswrapper[4899]: I1003 08:53:15.270423 4899 generic.go:334] "Generic (PLEG): container finished" podID="f5e06749-553d-438a-b1be-4db08df71d67" containerID="6b1827e746982559fca249fa78bd0118d59122f0667a4b1a1430fdfaab9475fc" exitCode=0 Oct 03 08:53:15 crc kubenswrapper[4899]: I1003 08:53:15.270469 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d" event={"ID":"f5e06749-553d-438a-b1be-4db08df71d67","Type":"ContainerDied","Data":"6b1827e746982559fca249fa78bd0118d59122f0667a4b1a1430fdfaab9475fc"} Oct 03 08:53:16 crc kubenswrapper[4899]: I1003 08:53:16.637754 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d" Oct 03 08:53:16 crc kubenswrapper[4899]: I1003 08:53:16.731114 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4n2w2\" (UniqueName: \"kubernetes.io/projected/f5e06749-553d-438a-b1be-4db08df71d67-kube-api-access-4n2w2\") pod \"f5e06749-553d-438a-b1be-4db08df71d67\" (UID: \"f5e06749-553d-438a-b1be-4db08df71d67\") " Oct 03 08:53:16 crc kubenswrapper[4899]: I1003 08:53:16.731221 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f5e06749-553d-438a-b1be-4db08df71d67-bundle\") pod \"f5e06749-553d-438a-b1be-4db08df71d67\" (UID: \"f5e06749-553d-438a-b1be-4db08df71d67\") " Oct 03 08:53:16 crc kubenswrapper[4899]: I1003 08:53:16.731250 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f5e06749-553d-438a-b1be-4db08df71d67-util\") pod \"f5e06749-553d-438a-b1be-4db08df71d67\" (UID: \"f5e06749-553d-438a-b1be-4db08df71d67\") " Oct 03 08:53:16 crc kubenswrapper[4899]: I1003 08:53:16.732067 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5e06749-553d-438a-b1be-4db08df71d67-bundle" (OuterVolumeSpecName: "bundle") pod "f5e06749-553d-438a-b1be-4db08df71d67" (UID: "f5e06749-553d-438a-b1be-4db08df71d67"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:53:16 crc kubenswrapper[4899]: I1003 08:53:16.737343 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5e06749-553d-438a-b1be-4db08df71d67-kube-api-access-4n2w2" (OuterVolumeSpecName: "kube-api-access-4n2w2") pod "f5e06749-553d-438a-b1be-4db08df71d67" (UID: "f5e06749-553d-438a-b1be-4db08df71d67"). InnerVolumeSpecName "kube-api-access-4n2w2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:53:16 crc kubenswrapper[4899]: I1003 08:53:16.744292 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5e06749-553d-438a-b1be-4db08df71d67-util" (OuterVolumeSpecName: "util") pod "f5e06749-553d-438a-b1be-4db08df71d67" (UID: "f5e06749-553d-438a-b1be-4db08df71d67"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:53:16 crc kubenswrapper[4899]: I1003 08:53:16.832240 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4n2w2\" (UniqueName: \"kubernetes.io/projected/f5e06749-553d-438a-b1be-4db08df71d67-kube-api-access-4n2w2\") on node \"crc\" DevicePath \"\"" Oct 03 08:53:16 crc kubenswrapper[4899]: I1003 08:53:16.832281 4899 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f5e06749-553d-438a-b1be-4db08df71d67-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:53:16 crc kubenswrapper[4899]: I1003 08:53:16.832291 4899 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f5e06749-553d-438a-b1be-4db08df71d67-util\") on node \"crc\" DevicePath \"\"" Oct 03 08:53:17 crc kubenswrapper[4899]: I1003 08:53:17.285049 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d" event={"ID":"f5e06749-553d-438a-b1be-4db08df71d67","Type":"ContainerDied","Data":"2cff2abdd372bfc63925f08a96c3dcb6c2d9f41c4204d811b32acbabd04c82a8"} Oct 03 08:53:17 crc kubenswrapper[4899]: I1003 08:53:17.285396 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2cff2abdd372bfc63925f08a96c3dcb6c2d9f41c4204d811b32acbabd04c82a8" Oct 03 08:53:17 crc kubenswrapper[4899]: I1003 08:53:17.285131 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d" Oct 03 08:53:25 crc kubenswrapper[4899]: I1003 08:53:25.013037 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-669c8666b5-gt89m"] Oct 03 08:53:25 crc kubenswrapper[4899]: E1003 08:53:25.013740 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5e06749-553d-438a-b1be-4db08df71d67" containerName="util" Oct 03 08:53:25 crc kubenswrapper[4899]: I1003 08:53:25.013754 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5e06749-553d-438a-b1be-4db08df71d67" containerName="util" Oct 03 08:53:25 crc kubenswrapper[4899]: E1003 08:53:25.013767 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5e06749-553d-438a-b1be-4db08df71d67" containerName="extract" Oct 03 08:53:25 crc kubenswrapper[4899]: I1003 08:53:25.013773 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5e06749-553d-438a-b1be-4db08df71d67" containerName="extract" Oct 03 08:53:25 crc kubenswrapper[4899]: E1003 08:53:25.013781 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5e06749-553d-438a-b1be-4db08df71d67" containerName="pull" Oct 03 08:53:25 crc kubenswrapper[4899]: I1003 08:53:25.013788 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5e06749-553d-438a-b1be-4db08df71d67" containerName="pull" Oct 03 08:53:25 crc kubenswrapper[4899]: I1003 08:53:25.013907 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5e06749-553d-438a-b1be-4db08df71d67" containerName="extract" Oct 03 08:53:25 crc kubenswrapper[4899]: I1003 08:53:25.014573 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-669c8666b5-gt89m" Oct 03 08:53:25 crc kubenswrapper[4899]: I1003 08:53:25.016592 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-mcp54" Oct 03 08:53:25 crc kubenswrapper[4899]: I1003 08:53:25.038730 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-669c8666b5-gt89m"] Oct 03 08:53:25 crc kubenswrapper[4899]: I1003 08:53:25.135158 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24nd7\" (UniqueName: \"kubernetes.io/projected/99f6fa80-7e18-4fa5-8421-8ff2e51fbdbc-kube-api-access-24nd7\") pod \"openstack-operator-controller-operator-669c8666b5-gt89m\" (UID: \"99f6fa80-7e18-4fa5-8421-8ff2e51fbdbc\") " pod="openstack-operators/openstack-operator-controller-operator-669c8666b5-gt89m" Oct 03 08:53:25 crc kubenswrapper[4899]: I1003 08:53:25.236117 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24nd7\" (UniqueName: \"kubernetes.io/projected/99f6fa80-7e18-4fa5-8421-8ff2e51fbdbc-kube-api-access-24nd7\") pod \"openstack-operator-controller-operator-669c8666b5-gt89m\" (UID: \"99f6fa80-7e18-4fa5-8421-8ff2e51fbdbc\") " pod="openstack-operators/openstack-operator-controller-operator-669c8666b5-gt89m" Oct 03 08:53:25 crc kubenswrapper[4899]: I1003 08:53:25.254022 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24nd7\" (UniqueName: \"kubernetes.io/projected/99f6fa80-7e18-4fa5-8421-8ff2e51fbdbc-kube-api-access-24nd7\") pod \"openstack-operator-controller-operator-669c8666b5-gt89m\" 
(UID: \"99f6fa80-7e18-4fa5-8421-8ff2e51fbdbc\") " pod="openstack-operators/openstack-operator-controller-operator-669c8666b5-gt89m" Oct 03 08:53:25 crc kubenswrapper[4899]: I1003 08:53:25.332613 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-669c8666b5-gt89m" Oct 03 08:53:25 crc kubenswrapper[4899]: I1003 08:53:25.769211 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-669c8666b5-gt89m"] Oct 03 08:53:26 crc kubenswrapper[4899]: I1003 08:53:26.345197 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-669c8666b5-gt89m" event={"ID":"99f6fa80-7e18-4fa5-8421-8ff2e51fbdbc","Type":"ContainerStarted","Data":"42ec16b1c6c462df265dbd9427ff4dfb3e372139bd2b3aa511d5e1de2fd18ea9"} Oct 03 08:53:29 crc kubenswrapper[4899]: I1003 08:53:29.362716 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-669c8666b5-gt89m" event={"ID":"99f6fa80-7e18-4fa5-8421-8ff2e51fbdbc","Type":"ContainerStarted","Data":"bb835cf45d26f5207c5a515acb624e9115cbbc8587c58ffd2da3ac071c4b0fe6"} Oct 03 08:53:32 crc kubenswrapper[4899]: I1003 08:53:32.384710 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-669c8666b5-gt89m" event={"ID":"99f6fa80-7e18-4fa5-8421-8ff2e51fbdbc","Type":"ContainerStarted","Data":"58f6d7230536a3a2604ddf5cb6ef0149e9f4fccaaef9e4f3fd7005bcd0bcf6fb"} Oct 03 08:53:32 crc kubenswrapper[4899]: I1003 08:53:32.386761 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-669c8666b5-gt89m" Oct 03 08:53:35 crc kubenswrapper[4899]: I1003 08:53:35.336480 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-669c8666b5-gt89m" Oct 03 08:53:35 crc kubenswrapper[4899]: I1003 08:53:35.372980 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-669c8666b5-gt89m" podStartSLOduration=5.914039991 podStartE2EDuration="11.372960954s" podCreationTimestamp="2025-10-03 08:53:24 +0000 UTC" firstStartedPulling="2025-10-03 08:53:25.777659973 +0000 UTC m=+779.885144926" lastFinishedPulling="2025-10-03 08:53:31.236580936 +0000 UTC m=+785.344065889" observedRunningTime="2025-10-03 08:53:32.420009729 +0000 UTC m=+786.527494682" watchObservedRunningTime="2025-10-03 08:53:35.372960954 +0000 UTC m=+789.480445907" Oct 03 08:53:51 crc kubenswrapper[4899]: I1003 08:53:51.398151 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gbhrz"] Oct 03 08:53:51 crc kubenswrapper[4899]: I1003 08:53:51.399974 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gbhrz" Oct 03 08:53:51 crc kubenswrapper[4899]: I1003 08:53:51.409484 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gbhrz"] Oct 03 08:53:51 crc kubenswrapper[4899]: I1003 08:53:51.466867 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eaeb11f4-25fe-42cc-8bf4-0d6c93803c16-catalog-content\") pod \"community-operators-gbhrz\" (UID: \"eaeb11f4-25fe-42cc-8bf4-0d6c93803c16\") " pod="openshift-marketplace/community-operators-gbhrz" Oct 03 08:53:51 crc kubenswrapper[4899]: I1003 08:53:51.466938 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rl9cv\" (UniqueName: \"kubernetes.io/projected/eaeb11f4-25fe-42cc-8bf4-0d6c93803c16-kube-api-access-rl9cv\") pod \"community-operators-gbhrz\" (UID: \"eaeb11f4-25fe-42cc-8bf4-0d6c93803c16\") " pod="openshift-marketplace/community-operators-gbhrz" Oct 03 08:53:51 crc kubenswrapper[4899]: I1003 08:53:51.467095 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eaeb11f4-25fe-42cc-8bf4-0d6c93803c16-utilities\") pod \"community-operators-gbhrz\" (UID: \"eaeb11f4-25fe-42cc-8bf4-0d6c93803c16\") " pod="openshift-marketplace/community-operators-gbhrz" Oct 03 08:53:51 crc kubenswrapper[4899]: I1003 08:53:51.567997 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eaeb11f4-25fe-42cc-8bf4-0d6c93803c16-catalog-content\") pod \"community-operators-gbhrz\" (UID: \"eaeb11f4-25fe-42cc-8bf4-0d6c93803c16\") " pod="openshift-marketplace/community-operators-gbhrz" Oct 03 08:53:51 crc kubenswrapper[4899]: I1003 08:53:51.568053 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rl9cv\" (UniqueName: \"kubernetes.io/projected/eaeb11f4-25fe-42cc-8bf4-0d6c93803c16-kube-api-access-rl9cv\") pod \"community-operators-gbhrz\" (UID: \"eaeb11f4-25fe-42cc-8bf4-0d6c93803c16\") " pod="openshift-marketplace/community-operators-gbhrz" Oct 03 08:53:51 crc kubenswrapper[4899]: I1003 08:53:51.568116 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eaeb11f4-25fe-42cc-8bf4-0d6c93803c16-utilities\") pod \"community-operators-gbhrz\" (UID: \"eaeb11f4-25fe-42cc-8bf4-0d6c93803c16\") " pod="openshift-marketplace/community-operators-gbhrz" Oct 03 08:53:51 crc kubenswrapper[4899]: I1003 08:53:51.569026 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eaeb11f4-25fe-42cc-8bf4-0d6c93803c16-catalog-content\") pod \"community-operators-gbhrz\" (UID: \"eaeb11f4-25fe-42cc-8bf4-0d6c93803c16\") " pod="openshift-marketplace/community-operators-gbhrz" Oct 03 08:53:51 crc kubenswrapper[4899]: I1003 08:53:51.569129 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eaeb11f4-25fe-42cc-8bf4-0d6c93803c16-utilities\") pod \"community-operators-gbhrz\" (UID: \"eaeb11f4-25fe-42cc-8bf4-0d6c93803c16\") " pod="openshift-marketplace/community-operators-gbhrz" Oct 03 08:53:51 crc kubenswrapper[4899]: I1003 08:53:51.592616 4899 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rl9cv\" (UniqueName: \"kubernetes.io/projected/eaeb11f4-25fe-42cc-8bf4-0d6c93803c16-kube-api-access-rl9cv\") pod \"community-operators-gbhrz\" (UID: \"eaeb11f4-25fe-42cc-8bf4-0d6c93803c16\") " pod="openshift-marketplace/community-operators-gbhrz" Oct 03 08:53:51 crc kubenswrapper[4899]: I1003 08:53:51.719565 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gbhrz" Oct 03 08:53:51 crc kubenswrapper[4899]: I1003 08:53:51.925758 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6c675fb79f-hz7qr"] Oct 03 08:53:51 crc kubenswrapper[4899]: I1003 08:53:51.926914 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-hz7qr" Oct 03 08:53:51 crc kubenswrapper[4899]: I1003 08:53:51.936615 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-z76dd" Oct 03 08:53:51 crc kubenswrapper[4899]: I1003 08:53:51.940708 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-79d68d6c85-bj2gj"] Oct 03 08:53:51 crc kubenswrapper[4899]: I1003 08:53:51.941842 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-bj2gj" Oct 03 08:53:51 crc kubenswrapper[4899]: I1003 08:53:51.946031 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6c675fb79f-hz7qr"] Oct 03 08:53:51 crc kubenswrapper[4899]: I1003 08:53:51.962709 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-fmlxp" Oct 03 08:53:51 crc kubenswrapper[4899]: I1003 08:53:51.971215 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-79d68d6c85-bj2gj"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.020385 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-75dfd9b554-jqgrz"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.022545 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-jqgrz" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.072052 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-846dff85b5-mhkqv"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.073516 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-mhkqv" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.074203 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-kfv5m" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.075469 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-846dff85b5-mhkqv"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.085248 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjvtb\" (UniqueName: \"kubernetes.io/projected/3e876467-fd1a-4b4c-b62b-d1641400a756-kube-api-access-gjvtb\") pod \"cinder-operator-controller-manager-79d68d6c85-bj2gj\" (UID: \"3e876467-fd1a-4b4c-b62b-d1641400a756\") " pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-bj2gj" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.085292 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwrkp\" (UniqueName: \"kubernetes.io/projected/c90f297d-af70-423f-b34d-8b3599ba12eb-kube-api-access-lwrkp\") pod \"barbican-operator-controller-manager-6c675fb79f-hz7qr\" (UID: \"c90f297d-af70-423f-b34d-8b3599ba12eb\") " pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-hz7qr" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.085434 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-599898f689-5c9bs"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.086749 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-75dfd9b554-jqgrz"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.087232 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-599898f689-5c9bs" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.088841 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-8ssfw" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.099820 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-599898f689-5c9bs"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.099879 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6769b867d9-k8snv"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.100743 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-k8snv" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.104192 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-pv4p7" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.108752 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-46tjg" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.117554 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6769b867d9-k8snv"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.177735 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-5fbf469cd7-9qcj2"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.178918 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-9qcj2" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.184839 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.185184 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-nbzjp" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.186386 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bpkq\" (UniqueName: \"kubernetes.io/projected/b14b68d1-483a-419c-b696-a915c6d25d09-kube-api-access-8bpkq\") pod \"heat-operator-controller-manager-599898f689-5c9bs\" (UID: \"b14b68d1-483a-419c-b696-a915c6d25d09\") " pod="openstack-operators/heat-operator-controller-manager-599898f689-5c9bs" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.186436 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjvtb\" (UniqueName: \"kubernetes.io/projected/3e876467-fd1a-4b4c-b62b-d1641400a756-kube-api-access-gjvtb\") pod \"cinder-operator-controller-manager-79d68d6c85-bj2gj\" (UID: \"3e876467-fd1a-4b4c-b62b-d1641400a756\") " pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-bj2gj" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.186456 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwrkp\" (UniqueName: \"kubernetes.io/projected/c90f297d-af70-423f-b34d-8b3599ba12eb-kube-api-access-lwrkp\") pod \"barbican-operator-controller-manager-6c675fb79f-hz7qr\" (UID: \"c90f297d-af70-423f-b34d-8b3599ba12eb\") " pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-hz7qr" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.186477 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rd2nv\" (UniqueName: \"kubernetes.io/projected/b32f9b3e-72a8-4229-9715-8fdd98877a04-kube-api-access-rd2nv\") pod \"glance-operator-controller-manager-846dff85b5-mhkqv\" (UID: \"b32f9b3e-72a8-4229-9715-8fdd98877a04\") " pod="openstack-operators/glance-operator-controller-manager-846dff85b5-mhkqv" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.186538 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-pnwnw\" (UniqueName: \"kubernetes.io/projected/ba75f5b9-b92b-4cd7-98c9-1bcf6b772940-kube-api-access-pnwnw\") pod \"designate-operator-controller-manager-75dfd9b554-jqgrz\" (UID: \"ba75f5b9-b92b-4cd7-98c9-1bcf6b772940\") " pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-jqgrz" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.235650 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjvtb\" (UniqueName: \"kubernetes.io/projected/3e876467-fd1a-4b4c-b62b-d1641400a756-kube-api-access-gjvtb\") pod \"cinder-operator-controller-manager-79d68d6c85-bj2gj\" (UID: \"3e876467-fd1a-4b4c-b62b-d1641400a756\") " pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-bj2gj" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.250962 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-84bc9db6cc-hpqjs"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.252078 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-hpqjs" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.263542 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwrkp\" (UniqueName: \"kubernetes.io/projected/c90f297d-af70-423f-b34d-8b3599ba12eb-kube-api-access-lwrkp\") pod \"barbican-operator-controller-manager-6c675fb79f-hz7qr\" (UID: \"c90f297d-af70-423f-b34d-8b3599ba12eb\") " pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-hz7qr" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.287236 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-c75nc" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.291281 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7f55849f88-qxxgx"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.292268 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-qxxgx" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.300123 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-p7hzp" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.309134 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-5fbf469cd7-9qcj2"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.312176 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnwnw\" (UniqueName: \"kubernetes.io/projected/ba75f5b9-b92b-4cd7-98c9-1bcf6b772940-kube-api-access-pnwnw\") pod \"designate-operator-controller-manager-75dfd9b554-jqgrz\" (UID: \"ba75f5b9-b92b-4cd7-98c9-1bcf6b772940\") " pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-jqgrz" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.312280 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bpkq\" (UniqueName: \"kubernetes.io/projected/b14b68d1-483a-419c-b696-a915c6d25d09-kube-api-access-8bpkq\") pod \"heat-operator-controller-manager-599898f689-5c9bs\" (UID: \"b14b68d1-483a-419c-b696-a915c6d25d09\") " pod="openstack-operators/heat-operator-controller-manager-599898f689-5c9bs" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.312334 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qx54b\" (UniqueName: \"kubernetes.io/projected/2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919-kube-api-access-qx54b\") pod \"infra-operator-controller-manager-5fbf469cd7-9qcj2\" (UID: \"2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919\") " pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-9qcj2" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.312398 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rd2nv\" (UniqueName: \"kubernetes.io/projected/b32f9b3e-72a8-4229-9715-8fdd98877a04-kube-api-access-rd2nv\") pod \"glance-operator-controller-manager-846dff85b5-mhkqv\" (UID: \"b32f9b3e-72a8-4229-9715-8fdd98877a04\") " pod="openstack-operators/glance-operator-controller-manager-846dff85b5-mhkqv" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.312506 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919-cert\") pod \"infra-operator-controller-manager-5fbf469cd7-9qcj2\" (UID: \"2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919\") " pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-9qcj2" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.312569 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lq24g\" (UniqueName: \"kubernetes.io/projected/a8895f13-915f-45f7-8156-43a7f11ac9bb-kube-api-access-lq24g\") pod \"horizon-operator-controller-manager-6769b867d9-k8snv\" (UID: \"a8895f13-915f-45f7-8156-43a7f11ac9bb\") " pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-k8snv" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.312744 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-hz7qr" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.315649 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7f55849f88-qxxgx"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.330933 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-84bc9db6cc-hpqjs"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.359337 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-trrmd"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.360604 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-trrmd" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.367192 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-b2t9g" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.373975 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnwnw\" (UniqueName: \"kubernetes.io/projected/ba75f5b9-b92b-4cd7-98c9-1bcf6b772940-kube-api-access-pnwnw\") pod \"designate-operator-controller-manager-75dfd9b554-jqgrz\" (UID: \"ba75f5b9-b92b-4cd7-98c9-1bcf6b772940\") " pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-jqgrz" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.375224 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rd2nv\" (UniqueName: \"kubernetes.io/projected/b32f9b3e-72a8-4229-9715-8fdd98877a04-kube-api-access-rd2nv\") pod \"glance-operator-controller-manager-846dff85b5-mhkqv\" (UID: \"b32f9b3e-72a8-4229-9715-8fdd98877a04\") " pod="openstack-operators/glance-operator-controller-manager-846dff85b5-mhkqv" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.377948 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-6fd6854b49-hwhx2"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.379248 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-hwhx2" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.379274 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-bj2gj" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.380530 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-trrmd"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.394645 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-6574bf987d-dglwx"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.395846 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-dglwx" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.416760 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bpkq\" (UniqueName: \"kubernetes.io/projected/b14b68d1-483a-419c-b696-a915c6d25d09-kube-api-access-8bpkq\") pod \"heat-operator-controller-manager-599898f689-5c9bs\" (UID: \"b14b68d1-483a-419c-b696-a915c6d25d09\") " pod="openstack-operators/heat-operator-controller-manager-599898f689-5c9bs" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.417297 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-tccqp" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.417541 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-msx2c" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.419097 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4m95w\" (UniqueName: \"kubernetes.io/projected/110672ad-3117-4a7c-8614-f12ab626e28c-kube-api-access-4m95w\") pod \"mariadb-operator-controller-manager-5c468bf4d4-trrmd\" (UID: \"110672ad-3117-4a7c-8614-f12ab626e28c\") " pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-trrmd" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.419158 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919-cert\") pod \"infra-operator-controller-manager-5fbf469cd7-9qcj2\" (UID: \"2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919\") " pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-9qcj2" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.419194 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lq24g\" (UniqueName: \"kubernetes.io/projected/a8895f13-915f-45f7-8156-43a7f11ac9bb-kube-api-access-lq24g\") pod \"horizon-operator-controller-manager-6769b867d9-k8snv\" (UID: \"a8895f13-915f-45f7-8156-43a7f11ac9bb\") " pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-k8snv" Oct 03 08:53:52 crc kubenswrapper[4899]: E1003 08:53:52.419535 4899 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Oct 03 08:53:52 crc kubenswrapper[4899]: E1003 08:53:52.419605 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919-cert podName:2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919 nodeName:}" failed. No retries permitted until 2025-10-03 08:53:52.919583108 +0000 UTC m=+807.027068141 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919-cert") pod "infra-operator-controller-manager-5fbf469cd7-9qcj2" (UID: "2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919") : secret "infra-operator-webhook-server-cert" not found Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.423499 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-jqgrz" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.425730 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-6574bf987d-dglwx"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.431056 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-59d6cfdf45-7zf22"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.432029 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-7zf22" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.419245 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gb6vp\" (UniqueName: \"kubernetes.io/projected/39eb57f7-d61f-4445-aea3-6b96585c4f76-kube-api-access-gb6vp\") pod \"ironic-operator-controller-manager-84bc9db6cc-hpqjs\" (UID: \"39eb57f7-d61f-4445-aea3-6b96585c4f76\") " pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-hpqjs" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.432684 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtkqm\" (UniqueName: \"kubernetes.io/projected/d8d28854-8e4e-47cd-847a-c58811fb4f91-kube-api-access-wtkqm\") pod \"keystone-operator-controller-manager-7f55849f88-qxxgx\" (UID: \"d8d28854-8e4e-47cd-847a-c58811fb4f91\") " pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-qxxgx" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.433089 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qx54b\" (UniqueName: \"kubernetes.io/projected/2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919-kube-api-access-qx54b\") pod \"infra-operator-controller-manager-5fbf469cd7-9qcj2\" (UID: \"2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919\") " pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-9qcj2" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.438656 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-7rvl2" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.458530 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-mhkqv" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.467566 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6fd6854b49-hwhx2"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.485917 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lq24g\" (UniqueName: \"kubernetes.io/projected/a8895f13-915f-45f7-8156-43a7f11ac9bb-kube-api-access-lq24g\") pod \"horizon-operator-controller-manager-6769b867d9-k8snv\" (UID: \"a8895f13-915f-45f7-8156-43a7f11ac9bb\") " pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-k8snv" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.485988 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-555c7456bd-78vk7"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.487103 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-78vk7" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.491130 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-6g2fz" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.497693 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qx54b\" (UniqueName: \"kubernetes.io/projected/2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919-kube-api-access-qx54b\") pod \"infra-operator-controller-manager-5fbf469cd7-9qcj2\" (UID: \"2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919\") " pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-9qcj2" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.498018 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-599898f689-5c9bs" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.510972 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-k8snv" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.548963 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ppcl\" (UniqueName: \"kubernetes.io/projected/de3ec379-fb48-440a-8502-3650db78804a-kube-api-access-5ppcl\") pod \"octavia-operator-controller-manager-59d6cfdf45-7zf22\" (UID: \"de3ec379-fb48-440a-8502-3650db78804a\") " pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-7zf22" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.549023 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gb6vp\" (UniqueName: \"kubernetes.io/projected/39eb57f7-d61f-4445-aea3-6b96585c4f76-kube-api-access-gb6vp\") pod \"ironic-operator-controller-manager-84bc9db6cc-hpqjs\" (UID: \"39eb57f7-d61f-4445-aea3-6b96585c4f76\") " pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-hpqjs" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.549048 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtkqm\" (UniqueName: \"kubernetes.io/projected/d8d28854-8e4e-47cd-847a-c58811fb4f91-kube-api-access-wtkqm\") pod \"keystone-operator-controller-manager-7f55849f88-qxxgx\" (UID: \"d8d28854-8e4e-47cd-847a-c58811fb4f91\") " pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-qxxgx" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.549134 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ld4gv\" (UniqueName: \"kubernetes.io/projected/f500aadc-0447-4d26-9ab4-83f64b084a89-kube-api-access-ld4gv\") pod \"neutron-operator-controller-manager-6574bf987d-dglwx\" (UID: \"f500aadc-0447-4d26-9ab4-83f64b084a89\") " pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-dglwx" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.549162 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tt7m5\" (UniqueName: \"kubernetes.io/projected/20ecf87a-f08f-4d2f-92fd-ba14a9a9e5b2-kube-api-access-tt7m5\") pod \"nova-operator-controller-manager-555c7456bd-78vk7\" (UID: \"20ecf87a-f08f-4d2f-92fd-ba14a9a9e5b2\") " pod="openstack-operators/nova-operator-controller-manager-555c7456bd-78vk7" Oct 03 
08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.549198 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prblv\" (UniqueName: \"kubernetes.io/projected/ed44541c-bb31-43bb-92eb-298b01820505-kube-api-access-prblv\") pod \"manila-operator-controller-manager-6fd6854b49-hwhx2\" (UID: \"ed44541c-bb31-43bb-92eb-298b01820505\") " pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-hwhx2" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.549224 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4m95w\" (UniqueName: \"kubernetes.io/projected/110672ad-3117-4a7c-8614-f12ab626e28c-kube-api-access-4m95w\") pod \"mariadb-operator-controller-manager-5c468bf4d4-trrmd\" (UID: \"110672ad-3117-4a7c-8614-f12ab626e28c\") " pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-trrmd" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.581469 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-59d6cfdf45-7zf22"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.581515 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-555c7456bd-78vk7"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.581533 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-688db7b6c7-7psz6"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.584555 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-688db7b6c7-7psz6"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.584680 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-7psz6" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.626946 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-7d8bb7f44c-bnb75"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.629012 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-bnb75" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.675123 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-pnvct" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.676518 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-f7lj4" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.678456 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ld4gv\" (UniqueName: \"kubernetes.io/projected/f500aadc-0447-4d26-9ab4-83f64b084a89-kube-api-access-ld4gv\") pod \"neutron-operator-controller-manager-6574bf987d-dglwx\" (UID: \"f500aadc-0447-4d26-9ab4-83f64b084a89\") " pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-dglwx" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.678530 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tt7m5\" (UniqueName: \"kubernetes.io/projected/20ecf87a-f08f-4d2f-92fd-ba14a9a9e5b2-kube-api-access-tt7m5\") pod \"nova-operator-controller-manager-555c7456bd-78vk7\" (UID: \"20ecf87a-f08f-4d2f-92fd-ba14a9a9e5b2\") " pod="openstack-operators/nova-operator-controller-manager-555c7456bd-78vk7" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.678581 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prblv\" (UniqueName: \"kubernetes.io/projected/ed44541c-bb31-43bb-92eb-298b01820505-kube-api-access-prblv\") pod \"manila-operator-controller-manager-6fd6854b49-hwhx2\" (UID: \"ed44541c-bb31-43bb-92eb-298b01820505\") " pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-hwhx2" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.678673 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t922l\" (UniqueName: \"kubernetes.io/projected/2cbb69db-51ad-471c-be3a-57b9422f11cd-kube-api-access-t922l\") pod \"ovn-operator-controller-manager-688db7b6c7-7psz6\" (UID: \"2cbb69db-51ad-471c-be3a-57b9422f11cd\") " pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-7psz6" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.678846 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ppcl\" (UniqueName: \"kubernetes.io/projected/de3ec379-fb48-440a-8502-3650db78804a-kube-api-access-5ppcl\") pod \"octavia-operator-controller-manager-59d6cfdf45-7zf22\" (UID: \"de3ec379-fb48-440a-8502-3650db78804a\") " pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-7zf22" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.698808 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gb6vp\" (UniqueName: \"kubernetes.io/projected/39eb57f7-d61f-4445-aea3-6b96585c4f76-kube-api-access-gb6vp\") pod \"ironic-operator-controller-manager-84bc9db6cc-hpqjs\" (UID: \"39eb57f7-d61f-4445-aea3-6b96585c4f76\") " pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-hpqjs" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.722030 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtkqm\" (UniqueName: \"kubernetes.io/projected/d8d28854-8e4e-47cd-847a-c58811fb4f91-kube-api-access-wtkqm\") pod 
\"keystone-operator-controller-manager-7f55849f88-qxxgx\" (UID: \"d8d28854-8e4e-47cd-847a-c58811fb4f91\") " pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-qxxgx" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.726042 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.764607 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prblv\" (UniqueName: \"kubernetes.io/projected/ed44541c-bb31-43bb-92eb-298b01820505-kube-api-access-prblv\") pod \"manila-operator-controller-manager-6fd6854b49-hwhx2\" (UID: \"ed44541c-bb31-43bb-92eb-298b01820505\") " pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-hwhx2" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.765701 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.772199 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4m95w\" (UniqueName: \"kubernetes.io/projected/110672ad-3117-4a7c-8614-f12ab626e28c-kube-api-access-4m95w\") pod \"mariadb-operator-controller-manager-5c468bf4d4-trrmd\" (UID: \"110672ad-3117-4a7c-8614-f12ab626e28c\") " pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-trrmd" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.772809 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ppcl\" (UniqueName: \"kubernetes.io/projected/de3ec379-fb48-440a-8502-3650db78804a-kube-api-access-5ppcl\") pod \"octavia-operator-controller-manager-59d6cfdf45-7zf22\" (UID: \"de3ec379-fb48-440a-8502-3650db78804a\") " pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-7zf22" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.782682 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-hwhx2" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.783160 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.788650 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h764p\" (UniqueName: \"kubernetes.io/projected/6008780d-5be3-4fda-8526-594566364ae4-kube-api-access-h764p\") pod \"placement-operator-controller-manager-7d8bb7f44c-bnb75\" (UID: \"6008780d-5be3-4fda-8526-594566364ae4\") " pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-bnb75" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.788757 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t922l\" (UniqueName: \"kubernetes.io/projected/2cbb69db-51ad-471c-be3a-57b9422f11cd-kube-api-access-t922l\") pod \"ovn-operator-controller-manager-688db7b6c7-7psz6\" (UID: \"2cbb69db-51ad-471c-be3a-57b9422f11cd\") " pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-7psz6" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.790111 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ld4gv\" (UniqueName: \"kubernetes.io/projected/f500aadc-0447-4d26-9ab4-83f64b084a89-kube-api-access-ld4gv\") pod \"neutron-operator-controller-manager-6574bf987d-dglwx\" (UID: \"f500aadc-0447-4d26-9ab4-83f64b084a89\") " pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-dglwx" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.791455 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-7d8bb7f44c-bnb75"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.798465 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-tfwjf" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.809090 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.821979 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tt7m5\" (UniqueName: \"kubernetes.io/projected/20ecf87a-f08f-4d2f-92fd-ba14a9a9e5b2-kube-api-access-tt7m5\") pod \"nova-operator-controller-manager-555c7456bd-78vk7\" (UID: \"20ecf87a-f08f-4d2f-92fd-ba14a9a9e5b2\") " pod="openstack-operators/nova-operator-controller-manager-555c7456bd-78vk7" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.824873 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-6859f9b676-f5mgb"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.826163 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-f5mgb" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.835511 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zvwbw"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.836623 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zvwbw" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.840788 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-xlvsd" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.841048 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5cd5cb47d7-lpwr4"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.842537 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-jgn9c" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.842760 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-lpwr4" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.845420 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-w6pwz" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.845744 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t922l\" (UniqueName: \"kubernetes.io/projected/2cbb69db-51ad-471c-be3a-57b9422f11cd-kube-api-access-t922l\") pod \"ovn-operator-controller-manager-688db7b6c7-7psz6\" (UID: \"2cbb69db-51ad-471c-be3a-57b9422f11cd\") " pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-7psz6" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.850407 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-dglwx" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.850427 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gbhrz"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.857638 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-6859f9b676-f5mgb"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.865496 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5cd5cb47d7-lpwr4"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.869522 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-fcd7d9895-lxmmw"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.871504 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-lxmmw" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.874526 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-fcd7d9895-lxmmw"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.876422 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-4wznj" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.878530 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zvwbw"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.885275 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-7zf22" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.889582 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8n4t8\" (UniqueName: \"kubernetes.io/projected/03abda9b-2057-42c8-8161-4104ecb96027-kube-api-access-8n4t8\") pod \"test-operator-controller-manager-5cd5cb47d7-lpwr4\" (UID: \"03abda9b-2057-42c8-8161-4104ecb96027\") " pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-lpwr4" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.889695 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkhmj\" (UniqueName: \"kubernetes.io/projected/837b07a9-5832-4d01-b257-ac3fca82b121-kube-api-access-qkhmj\") pod \"swift-operator-controller-manager-6859f9b676-f5mgb\" (UID: \"837b07a9-5832-4d01-b257-ac3fca82b121\") " pod="openstack-operators/swift-operator-controller-manager-6859f9b676-f5mgb" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.889733 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/62eda81d-d797-4ed4-9687-9cdc7c49decb-cert\") pod \"openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg\" (UID: \"62eda81d-d797-4ed4-9687-9cdc7c49decb\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.889783 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h764p\" (UniqueName: \"kubernetes.io/projected/6008780d-5be3-4fda-8526-594566364ae4-kube-api-access-h764p\") pod \"placement-operator-controller-manager-7d8bb7f44c-bnb75\" (UID: \"6008780d-5be3-4fda-8526-594566364ae4\") " pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-bnb75" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.889816 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfcfb\" (UniqueName: \"kubernetes.io/projected/6c6cb9a6-eacf-411e-8c19-ac8ee51eced8-kube-api-access-bfcfb\") pod \"telemetry-operator-controller-manager-5db5cf686f-zvwbw\" (UID: \"6c6cb9a6-eacf-411e-8c19-ac8ee51eced8\") " pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zvwbw" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.889839 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s826s\" (UniqueName: \"kubernetes.io/projected/62eda81d-d797-4ed4-9687-9cdc7c49decb-kube-api-access-s826s\") pod \"openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg\" (UID: \"62eda81d-d797-4ed4-9687-9cdc7c49decb\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.889863 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrhgq\" (UniqueName: \"kubernetes.io/projected/7496bf16-1fc1-44ec-b96b-e75e00652634-kube-api-access-qrhgq\") pod \"watcher-operator-controller-manager-fcd7d9895-lxmmw\" (UID: \"7496bf16-1fc1-44ec-b96b-e75e00652634\") " pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-lxmmw" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.927377 4899 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-hpqjs" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.938397 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-qxxgx" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.941415 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7cfd4b6679-jn88c"] Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.942727 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7cfd4b6679-jn88c" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.953729 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.954014 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-6725c" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.955726 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h764p\" (UniqueName: \"kubernetes.io/projected/6008780d-5be3-4fda-8526-594566364ae4-kube-api-access-h764p\") pod \"placement-operator-controller-manager-7d8bb7f44c-bnb75\" (UID: \"6008780d-5be3-4fda-8526-594566364ae4\") " pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-bnb75" Oct 03 08:53:52 crc kubenswrapper[4899]: I1003 08:53:52.976288 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7cfd4b6679-jn88c"] Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.002114 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfcfb\" (UniqueName: \"kubernetes.io/projected/6c6cb9a6-eacf-411e-8c19-ac8ee51eced8-kube-api-access-bfcfb\") pod \"telemetry-operator-controller-manager-5db5cf686f-zvwbw\" (UID: \"6c6cb9a6-eacf-411e-8c19-ac8ee51eced8\") " pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zvwbw" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.002159 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s826s\" (UniqueName: \"kubernetes.io/projected/62eda81d-d797-4ed4-9687-9cdc7c49decb-kube-api-access-s826s\") pod \"openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg\" (UID: \"62eda81d-d797-4ed4-9687-9cdc7c49decb\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.002178 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrhgq\" (UniqueName: \"kubernetes.io/projected/7496bf16-1fc1-44ec-b96b-e75e00652634-kube-api-access-qrhgq\") pod \"watcher-operator-controller-manager-fcd7d9895-lxmmw\" (UID: \"7496bf16-1fc1-44ec-b96b-e75e00652634\") " pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-lxmmw" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.002208 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919-cert\") pod \"infra-operator-controller-manager-5fbf469cd7-9qcj2\" (UID: \"2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919\") " 
pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-9qcj2" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.002261 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n4t8\" (UniqueName: \"kubernetes.io/projected/03abda9b-2057-42c8-8161-4104ecb96027-kube-api-access-8n4t8\") pod \"test-operator-controller-manager-5cd5cb47d7-lpwr4\" (UID: \"03abda9b-2057-42c8-8161-4104ecb96027\") " pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-lpwr4" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.002293 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkhmj\" (UniqueName: \"kubernetes.io/projected/837b07a9-5832-4d01-b257-ac3fca82b121-kube-api-access-qkhmj\") pod \"swift-operator-controller-manager-6859f9b676-f5mgb\" (UID: \"837b07a9-5832-4d01-b257-ac3fca82b121\") " pod="openstack-operators/swift-operator-controller-manager-6859f9b676-f5mgb" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.002316 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/62eda81d-d797-4ed4-9687-9cdc7c49decb-cert\") pod \"openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg\" (UID: \"62eda81d-d797-4ed4-9687-9cdc7c49decb\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.002348 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2bb995f5-5432-40fc-a196-71cac18de666-cert\") pod \"openstack-operator-controller-manager-7cfd4b6679-jn88c\" (UID: \"2bb995f5-5432-40fc-a196-71cac18de666\") " pod="openstack-operators/openstack-operator-controller-manager-7cfd4b6679-jn88c" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.002367 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzvqr\" (UniqueName: \"kubernetes.io/projected/2bb995f5-5432-40fc-a196-71cac18de666-kube-api-access-jzvqr\") pod \"openstack-operator-controller-manager-7cfd4b6679-jn88c\" (UID: \"2bb995f5-5432-40fc-a196-71cac18de666\") " pod="openstack-operators/openstack-operator-controller-manager-7cfd4b6679-jn88c" Oct 03 08:53:53 crc kubenswrapper[4899]: E1003 08:53:53.002989 4899 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 03 08:53:53 crc kubenswrapper[4899]: E1003 08:53:53.003036 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/62eda81d-d797-4ed4-9687-9cdc7c49decb-cert podName:62eda81d-d797-4ed4-9687-9cdc7c49decb nodeName:}" failed. No retries permitted until 2025-10-03 08:53:53.503018604 +0000 UTC m=+807.610503557 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/62eda81d-d797-4ed4-9687-9cdc7c49decb-cert") pod "openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg" (UID: "62eda81d-d797-4ed4-9687-9cdc7c49decb") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.009877 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919-cert\") pod \"infra-operator-controller-manager-5fbf469cd7-9qcj2\" (UID: \"2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919\") " pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-9qcj2" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.014912 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-78vk7" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.031038 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-n8q4j"] Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.031650 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrhgq\" (UniqueName: \"kubernetes.io/projected/7496bf16-1fc1-44ec-b96b-e75e00652634-kube-api-access-qrhgq\") pod \"watcher-operator-controller-manager-fcd7d9895-lxmmw\" (UID: \"7496bf16-1fc1-44ec-b96b-e75e00652634\") " pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-lxmmw" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.032147 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-n8q4j" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.032300 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-trrmd" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.035902 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkhmj\" (UniqueName: \"kubernetes.io/projected/837b07a9-5832-4d01-b257-ac3fca82b121-kube-api-access-qkhmj\") pod \"swift-operator-controller-manager-6859f9b676-f5mgb\" (UID: \"837b07a9-5832-4d01-b257-ac3fca82b121\") " pod="openstack-operators/swift-operator-controller-manager-6859f9b676-f5mgb" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.045963 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bfcfb\" (UniqueName: \"kubernetes.io/projected/6c6cb9a6-eacf-411e-8c19-ac8ee51eced8-kube-api-access-bfcfb\") pod \"telemetry-operator-controller-manager-5db5cf686f-zvwbw\" (UID: \"6c6cb9a6-eacf-411e-8c19-ac8ee51eced8\") " pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zvwbw" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.053320 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8n4t8\" (UniqueName: \"kubernetes.io/projected/03abda9b-2057-42c8-8161-4104ecb96027-kube-api-access-8n4t8\") pod \"test-operator-controller-manager-5cd5cb47d7-lpwr4\" (UID: \"03abda9b-2057-42c8-8161-4104ecb96027\") " pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-lpwr4" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.053640 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s826s\" (UniqueName: \"kubernetes.io/projected/62eda81d-d797-4ed4-9687-9cdc7c49decb-kube-api-access-s826s\") pod \"openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg\" (UID: \"62eda81d-d797-4ed4-9687-9cdc7c49decb\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.054217 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-qbqsg" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.062960 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-n8q4j"] Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.088081 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-7psz6" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.105423 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2bb995f5-5432-40fc-a196-71cac18de666-cert\") pod \"openstack-operator-controller-manager-7cfd4b6679-jn88c\" (UID: \"2bb995f5-5432-40fc-a196-71cac18de666\") " pod="openstack-operators/openstack-operator-controller-manager-7cfd4b6679-jn88c" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.105465 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzvqr\" (UniqueName: \"kubernetes.io/projected/2bb995f5-5432-40fc-a196-71cac18de666-kube-api-access-jzvqr\") pod \"openstack-operator-controller-manager-7cfd4b6679-jn88c\" (UID: \"2bb995f5-5432-40fc-a196-71cac18de666\") " pod="openstack-operators/openstack-operator-controller-manager-7cfd4b6679-jn88c" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.105540 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mbrj\" (UniqueName: \"kubernetes.io/projected/5c4913b3-fd20-4eee-99df-1900f5486f51-kube-api-access-8mbrj\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-n8q4j\" (UID: \"5c4913b3-fd20-4eee-99df-1900f5486f51\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-n8q4j" Oct 03 08:53:53 crc kubenswrapper[4899]: E1003 08:53:53.105666 4899 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Oct 03 08:53:53 crc kubenswrapper[4899]: E1003 08:53:53.105745 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2bb995f5-5432-40fc-a196-71cac18de666-cert podName:2bb995f5-5432-40fc-a196-71cac18de666 nodeName:}" failed. No retries permitted until 2025-10-03 08:53:53.605722469 +0000 UTC m=+807.713207432 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2bb995f5-5432-40fc-a196-71cac18de666-cert") pod "openstack-operator-controller-manager-7cfd4b6679-jn88c" (UID: "2bb995f5-5432-40fc-a196-71cac18de666") : secret "webhook-server-cert" not found Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.131359 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzvqr\" (UniqueName: \"kubernetes.io/projected/2bb995f5-5432-40fc-a196-71cac18de666-kube-api-access-jzvqr\") pod \"openstack-operator-controller-manager-7cfd4b6679-jn88c\" (UID: \"2bb995f5-5432-40fc-a196-71cac18de666\") " pod="openstack-operators/openstack-operator-controller-manager-7cfd4b6679-jn88c" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.136874 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-9qcj2" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.147519 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-bnb75" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.208084 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mbrj\" (UniqueName: \"kubernetes.io/projected/5c4913b3-fd20-4eee-99df-1900f5486f51-kube-api-access-8mbrj\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-n8q4j\" (UID: \"5c4913b3-fd20-4eee-99df-1900f5486f51\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-n8q4j" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.225451 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-f5mgb" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.231376 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zvwbw" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.236117 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mbrj\" (UniqueName: \"kubernetes.io/projected/5c4913b3-fd20-4eee-99df-1900f5486f51-kube-api-access-8mbrj\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-n8q4j\" (UID: \"5c4913b3-fd20-4eee-99df-1900f5486f51\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-n8q4j" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.300100 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-lpwr4" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.331885 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-lxmmw" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.364209 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-n8q4j" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.425973 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-79d68d6c85-bj2gj"] Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.432371 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6c675fb79f-hz7qr"] Oct 03 08:53:53 crc kubenswrapper[4899]: W1003 08:53:53.470809 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3e876467_fd1a_4b4c_b62b_d1641400a756.slice/crio-377c62e7cb4f93543c08c267c9ebaac3ad2b6ef3868df4fe1ce0a41bc7072378 WatchSource:0}: Error finding container 377c62e7cb4f93543c08c267c9ebaac3ad2b6ef3868df4fe1ce0a41bc7072378: Status 404 returned error can't find the container with id 377c62e7cb4f93543c08c267c9ebaac3ad2b6ef3868df4fe1ce0a41bc7072378 Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.513427 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/62eda81d-d797-4ed4-9687-9cdc7c49decb-cert\") pod \"openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg\" (UID: \"62eda81d-d797-4ed4-9687-9cdc7c49decb\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.517925 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/62eda81d-d797-4ed4-9687-9cdc7c49decb-cert\") pod \"openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg\" (UID: \"62eda81d-d797-4ed4-9687-9cdc7c49decb\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg" Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.587945 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-hz7qr" event={"ID":"c90f297d-af70-423f-b34d-8b3599ba12eb","Type":"ContainerStarted","Data":"4d425cadf43d2ad66b9ccb4086209d982f703e9c47fe9a3850dd7f00ef53e780"} Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.598790 4899 generic.go:334] "Generic (PLEG): container finished" podID="eaeb11f4-25fe-42cc-8bf4-0d6c93803c16" containerID="86eb7c935837bf6427ff83a227122c8824bcf4bf8bd2dfdcad2618566179863a" exitCode=0 Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.598850 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gbhrz" event={"ID":"eaeb11f4-25fe-42cc-8bf4-0d6c93803c16","Type":"ContainerDied","Data":"86eb7c935837bf6427ff83a227122c8824bcf4bf8bd2dfdcad2618566179863a"} Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.598873 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gbhrz" event={"ID":"eaeb11f4-25fe-42cc-8bf4-0d6c93803c16","Type":"ContainerStarted","Data":"ca86fa9458587b528bfd5fbd222c61946e3923f605c8b9ba15971d1feb510ca7"} Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.605116 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-bj2gj" event={"ID":"3e876467-fd1a-4b4c-b62b-d1641400a756","Type":"ContainerStarted","Data":"377c62e7cb4f93543c08c267c9ebaac3ad2b6ef3868df4fe1ce0a41bc7072378"} Oct 03 08:53:53 crc 
kubenswrapper[4899]: I1003 08:53:53.615530 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2bb995f5-5432-40fc-a196-71cac18de666-cert\") pod \"openstack-operator-controller-manager-7cfd4b6679-jn88c\" (UID: \"2bb995f5-5432-40fc-a196-71cac18de666\") " pod="openstack-operators/openstack-operator-controller-manager-7cfd4b6679-jn88c" Oct 03 08:53:53 crc kubenswrapper[4899]: E1003 08:53:53.615756 4899 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Oct 03 08:53:53 crc kubenswrapper[4899]: E1003 08:53:53.615816 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2bb995f5-5432-40fc-a196-71cac18de666-cert podName:2bb995f5-5432-40fc-a196-71cac18de666 nodeName:}" failed. No retries permitted until 2025-10-03 08:53:54.615795524 +0000 UTC m=+808.723280477 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2bb995f5-5432-40fc-a196-71cac18de666-cert") pod "openstack-operator-controller-manager-7cfd4b6679-jn88c" (UID: "2bb995f5-5432-40fc-a196-71cac18de666") : secret "webhook-server-cert" not found Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.736102 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-75dfd9b554-jqgrz"] Oct 03 08:53:53 crc kubenswrapper[4899]: I1003 08:53:53.766697 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg" Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.100332 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6fd6854b49-hwhx2"] Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.119077 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7f55849f88-qxxgx"] Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.129287 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-599898f689-5c9bs"] Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.149568 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-6574bf987d-dglwx"] Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.161201 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-846dff85b5-mhkqv"] Oct 03 08:53:54 crc kubenswrapper[4899]: W1003 08:53:54.166207 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb14b68d1_483a_419c_b696_a915c6d25d09.slice/crio-9d1fcc78364641ec5b6388ff6bf5dceae385316e41fee7642934d5258b11308c WatchSource:0}: Error finding container 9d1fcc78364641ec5b6388ff6bf5dceae385316e41fee7642934d5258b11308c: Status 404 returned error can't find the container with id 9d1fcc78364641ec5b6388ff6bf5dceae385316e41fee7642934d5258b11308c Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.169587 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-84bc9db6cc-hpqjs"] Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.180720 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/horizon-operator-controller-manager-6769b867d9-k8snv"] Oct 03 08:53:54 crc kubenswrapper[4899]: W1003 08:53:54.196285 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb32f9b3e_72a8_4229_9715_8fdd98877a04.slice/crio-b3975cf2e9adb780f6af46f37127f4167917d9c0d91dfc23c954ab15d9f3f0c8 WatchSource:0}: Error finding container b3975cf2e9adb780f6af46f37127f4167917d9c0d91dfc23c954ab15d9f3f0c8: Status 404 returned error can't find the container with id b3975cf2e9adb780f6af46f37127f4167917d9c0d91dfc23c954ab15d9f3f0c8 Oct 03 08:53:54 crc kubenswrapper[4899]: W1003 08:53:54.201368 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda8895f13_915f_45f7_8156_43a7f11ac9bb.slice/crio-f40f172485bb0e275e842a5fb6734d4dcc4245ec16663c7225a088952c669234 WatchSource:0}: Error finding container f40f172485bb0e275e842a5fb6734d4dcc4245ec16663c7225a088952c669234: Status 404 returned error can't find the container with id f40f172485bb0e275e842a5fb6734d4dcc4245ec16663c7225a088952c669234 Oct 03 08:53:54 crc kubenswrapper[4899]: W1003 08:53:54.274217 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podde3ec379_fb48_440a_8502_3650db78804a.slice/crio-d3585ab9fce9ff768b7332e945d5ce133b704cf7c6e981ae2a6f4d6f4cac4a4b WatchSource:0}: Error finding container d3585ab9fce9ff768b7332e945d5ce133b704cf7c6e981ae2a6f4d6f4cac4a4b: Status 404 returned error can't find the container with id d3585ab9fce9ff768b7332e945d5ce133b704cf7c6e981ae2a6f4d6f4cac4a4b Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.276451 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-59d6cfdf45-7zf22"] Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.478203 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-5fbf469cd7-9qcj2"] Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.500058 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-555c7456bd-78vk7"] Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.514183 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-7d8bb7f44c-bnb75"] Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.521760 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zvwbw"] Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.548533 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-trrmd"] Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.548558 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-n8q4j"] Oct 03 08:53:54 crc kubenswrapper[4899]: E1003 08:53:54.551178 4899 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:725da67b3f9cf2758564e0111928cdd570c0f6f1ca34775f159bbe94deb82548,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 
--leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-h764p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-7d8bb7f44c-bnb75_openstack-operators(6008780d-5be3-4fda-8526-594566364ae4): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.551704 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-6859f9b676-f5mgb"] Oct 03 08:53:54 crc kubenswrapper[4899]: W1003 08:53:54.553995 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2a9a8ff5_a009_4d5f_bc3a_8e449b5fd919.slice/crio-a9649f1ad1e6cc405565ced2e50fb6491fc6810a11e99e0446c00a8728760f26 WatchSource:0}: Error finding container a9649f1ad1e6cc405565ced2e50fb6491fc6810a11e99e0446c00a8728760f26: Status 404 returned error can't find the container with id a9649f1ad1e6cc405565ced2e50fb6491fc6810a11e99e0446c00a8728760f26 Oct 03 08:53:54 crc kubenswrapper[4899]: E1003 08:53:54.555669 4899 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:40fb1819b6639807b77ef79448d35f1e4bfc1838a09d4f380e9fa0f755352475,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 
--leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qx54b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-5fbf469cd7-9qcj2_openstack-operators(2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 03 08:53:54 crc kubenswrapper[4899]: W1003 08:53:54.556464 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2cbb69db_51ad_471c_be3a_57b9422f11cd.slice/crio-be96df7eeb4c1aae2df989ddc204c2b29d52e881dfa63ea947fb7316d2ac3dd1 WatchSource:0}: Error finding container be96df7eeb4c1aae2df989ddc204c2b29d52e881dfa63ea947fb7316d2ac3dd1: Status 404 returned error can't find the container with id be96df7eeb4c1aae2df989ddc204c2b29d52e881dfa63ea947fb7316d2ac3dd1 Oct 03 08:53:54 crc kubenswrapper[4899]: W1003 08:53:54.557448 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod837b07a9_5832_4d01_b257_ac3fca82b121.slice/crio-e14b621ed419d9f54c15671a6f8f5eb20accfd9ac0e10dd692ee8c1a5c0c010b WatchSource:0}: Error finding container e14b621ed419d9f54c15671a6f8f5eb20accfd9ac0e10dd692ee8c1a5c0c010b: Status 404 returned error can't find the container with id e14b621ed419d9f54c15671a6f8f5eb20accfd9ac0e10dd692ee8c1a5c0c010b Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.558610 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-688db7b6c7-7psz6"] Oct 03 
08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.564165 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-fcd7d9895-lxmmw"] Oct 03 08:53:54 crc kubenswrapper[4899]: E1003 08:53:54.565400 4899 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:637bb7b9ac308bc1e323391a3593b824f688090a856c83385814c17a571b1eed,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qkhmj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-6859f9b676-f5mgb_openstack-operators(837b07a9-5832-4d01-b257-ac3fca82b121): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 03 08:53:54 crc kubenswrapper[4899]: E1003 08:53:54.566562 4899 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:5c6ab93b78bd20eb7f1736751a59c1eb33fb06351339563dbefe49ccaaff6e94,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-t922l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-688db7b6c7-7psz6_openstack-operators(2cbb69db-51ad-471c-be3a-57b9422f11cd): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.568336 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5cd5cb47d7-lpwr4"] Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.573046 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg"] Oct 03 08:53:54 crc kubenswrapper[4899]: E1003 08:53:54.580968 4899 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:018151bd5ff830ec03c6b8e3d53cfb9456ca6e1e34793bdd4f7edd39a0146fa6,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qrhgq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-fcd7d9895-lxmmw_openstack-operators(7496bf16-1fc1-44ec-b96b-e75e00652634): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 03 08:53:54 crc kubenswrapper[4899]: W1003 08:53:54.586232 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6c6cb9a6_eacf_411e_8c19_ac8ee51eced8.slice/crio-5c21a3a773af9bae287faacc4c7aed781300a87d4d568df0234139eb4dc807a3 WatchSource:0}: Error finding container 5c21a3a773af9bae287faacc4c7aed781300a87d4d568df0234139eb4dc807a3: Status 404 returned error can't find the container with id 5c21a3a773af9bae287faacc4c7aed781300a87d4d568df0234139eb4dc807a3 Oct 03 08:53:54 crc kubenswrapper[4899]: E1003 08:53:54.596145 4899 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:8f5eee2eb7b77432ef1a88ed693ff981514359dfc808581f393bcef252de5cfa,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bfcfb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-5db5cf686f-zvwbw_openstack-operators(6c6cb9a6-eacf-411e-8c19-ac8ee51eced8): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.623384 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-trrmd" event={"ID":"110672ad-3117-4a7c-8614-f12ab626e28c","Type":"ContainerStarted","Data":"9c87ab87294f7d9b52ab5dd784daefb1f50c9bb9ede1290ff63dd6cf9a349722"} Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.628549 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-hpqjs" event={"ID":"39eb57f7-d61f-4445-aea3-6b96585c4f76","Type":"ContainerStarted","Data":"de8324d0e2413b71b342fb97ea8cf5da7267580f9e6312f5e8dfcceba1199c3c"} Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.631175 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zvwbw" event={"ID":"6c6cb9a6-eacf-411e-8c19-ac8ee51eced8","Type":"ContainerStarted","Data":"5c21a3a773af9bae287faacc4c7aed781300a87d4d568df0234139eb4dc807a3"} Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.633359 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-lxmmw" event={"ID":"7496bf16-1fc1-44ec-b96b-e75e00652634","Type":"ContainerStarted","Data":"10b99eaf611821353e2a92bc7be0b1b34047283c67a81cb169ce191322afc16a"} Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.634877 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-mhkqv" event={"ID":"b32f9b3e-72a8-4229-9715-8fdd98877a04","Type":"ContainerStarted","Data":"b3975cf2e9adb780f6af46f37127f4167917d9c0d91dfc23c954ab15d9f3f0c8"} Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.637070 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-dglwx" event={"ID":"f500aadc-0447-4d26-9ab4-83f64b084a89","Type":"ContainerStarted","Data":"55dc18174df63be232e2ae33100bf5c991a89b6802325da2a7b4091ab2a41282"} Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.637386 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2bb995f5-5432-40fc-a196-71cac18de666-cert\") pod \"openstack-operator-controller-manager-7cfd4b6679-jn88c\" (UID: \"2bb995f5-5432-40fc-a196-71cac18de666\") " pod="openstack-operators/openstack-operator-controller-manager-7cfd4b6679-jn88c" Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 
08:53:54.641698 4899 generic.go:334] "Generic (PLEG): container finished" podID="eaeb11f4-25fe-42cc-8bf4-0d6c93803c16" containerID="3ff8bb903c70481714ef070a58c5a04e1ad6d73b93780550e8f45aa7e60473b5" exitCode=0 Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.642497 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gbhrz" event={"ID":"eaeb11f4-25fe-42cc-8bf4-0d6c93803c16","Type":"ContainerDied","Data":"3ff8bb903c70481714ef070a58c5a04e1ad6d73b93780550e8f45aa7e60473b5"} Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.644171 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2bb995f5-5432-40fc-a196-71cac18de666-cert\") pod \"openstack-operator-controller-manager-7cfd4b6679-jn88c\" (UID: \"2bb995f5-5432-40fc-a196-71cac18de666\") " pod="openstack-operators/openstack-operator-controller-manager-7cfd4b6679-jn88c" Oct 03 08:53:54 crc kubenswrapper[4899]: W1003 08:53:54.653597 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod62eda81d_d797_4ed4_9687_9cdc7c49decb.slice/crio-43a6b62605c27e4c19db8faf64efdef5208da597b376d0f660c34b9c0f0e861e WatchSource:0}: Error finding container 43a6b62605c27e4c19db8faf64efdef5208da597b376d0f660c34b9c0f0e861e: Status 404 returned error can't find the container with id 43a6b62605c27e4c19db8faf64efdef5208da597b376d0f660c34b9c0f0e861e Oct 03 08:53:54 crc kubenswrapper[4899]: W1003 08:53:54.653883 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod03abda9b_2057_42c8_8161_4104ecb96027.slice/crio-e7659d834b4bbed9ba998a96543dde7f978ace16e83ac1320a2261b1912b618c WatchSource:0}: Error finding container e7659d834b4bbed9ba998a96543dde7f978ace16e83ac1320a2261b1912b618c: Status 404 returned error can't find the container with id e7659d834b4bbed9ba998a96543dde7f978ace16e83ac1320a2261b1912b618c Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.656419 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-7psz6" event={"ID":"2cbb69db-51ad-471c-be3a-57b9422f11cd","Type":"ContainerStarted","Data":"be96df7eeb4c1aae2df989ddc204c2b29d52e881dfa63ea947fb7316d2ac3dd1"} Oct 03 08:53:54 crc kubenswrapper[4899]: E1003 08:53:54.663529 4899 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad,Command:[/bin/opm],Args:[serve /extracted-catalog/catalog --cache-dir=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:GOMEMLIMIT,Value:120MiB,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{125829120 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rl9cv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe 
-addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-gbhrz_openshift-marketplace(eaeb11f4-25fe-42cc-8bf4-0d6c93803c16): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 03 08:53:54 crc kubenswrapper[4899]: E1003 08:53:54.663769 4899 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:0daf76cc40ab619ae266b11defcc1b65beb22d859369e7b1b04de9169089a4cb,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8n4t8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5cd5cb47d7-lpwr4_openstack-operators(03abda9b-2057-42c8-8161-4104ecb96027): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 03 08:53:54 crc kubenswrapper[4899]: E1003 08:53:54.665132 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"pull QPS exceeded\"" pod="openshift-marketplace/community-operators-gbhrz" podUID="eaeb11f4-25fe-42cc-8bf4-0d6c93803c16" Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.665183 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-k8snv" event={"ID":"a8895f13-915f-45f7-8156-43a7f11ac9bb","Type":"ContainerStarted","Data":"f40f172485bb0e275e842a5fb6734d4dcc4245ec16663c7225a088952c669234"} Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.669189 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-7zf22" event={"ID":"de3ec379-fb48-440a-8502-3650db78804a","Type":"ContainerStarted","Data":"d3585ab9fce9ff768b7332e945d5ce133b704cf7c6e981ae2a6f4d6f4cac4a4b"} Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.673642 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-bnb75" event={"ID":"6008780d-5be3-4fda-8526-594566364ae4","Type":"ContainerStarted","Data":"2762327a08989900928a04018ae2bc843caa51990cb1206ae5fed9f5e0ad9de7"} Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.675057 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-78vk7" event={"ID":"20ecf87a-f08f-4d2f-92fd-ba14a9a9e5b2","Type":"ContainerStarted","Data":"e04d1550f9aa75fc695d24c0882585ae21707a5f88b1d12f83e55adfb928ca45"} Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.677629 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-qxxgx" event={"ID":"d8d28854-8e4e-47cd-847a-c58811fb4f91","Type":"ContainerStarted","Data":"26f2b7a3f9bd09bb58774bd804a02aa322e28ad335068ce86848795419929d10"} Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.678610 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-f5mgb" event={"ID":"837b07a9-5832-4d01-b257-ac3fca82b121","Type":"ContainerStarted","Data":"e14b621ed419d9f54c15671a6f8f5eb20accfd9ac0e10dd692ee8c1a5c0c010b"} Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.679839 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-hwhx2" event={"ID":"ed44541c-bb31-43bb-92eb-298b01820505","Type":"ContainerStarted","Data":"fa2ebad48cadfcf734ad366778f5018da7d652f97596856d93babaac4781ccb8"} Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.681653 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-599898f689-5c9bs" event={"ID":"b14b68d1-483a-419c-b696-a915c6d25d09","Type":"ContainerStarted","Data":"9d1fcc78364641ec5b6388ff6bf5dceae385316e41fee7642934d5258b11308c"} Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.685801 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-9qcj2" event={"ID":"2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919","Type":"ContainerStarted","Data":"a9649f1ad1e6cc405565ced2e50fb6491fc6810a11e99e0446c00a8728760f26"} Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.687355 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-n8q4j" event={"ID":"5c4913b3-fd20-4eee-99df-1900f5486f51","Type":"ContainerStarted","Data":"cb96a585a3a3a4a1faf4256ee5ecdb76e6f1d4048d43a2006f084a9bafe117db"} Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.690377 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-jqgrz" event={"ID":"ba75f5b9-b92b-4cd7-98c9-1bcf6b772940","Type":"ContainerStarted","Data":"61a7d3e3b2713f976a477dd2afadd28d32ca368c0863051aa7c44b6b6f398101"} Oct 03 08:53:54 crc kubenswrapper[4899]: I1003 08:53:54.852502 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7cfd4b6679-jn88c" Oct 03 08:53:54 crc kubenswrapper[4899]: E1003 08:53:54.871240 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-bnb75" podUID="6008780d-5be3-4fda-8526-594566364ae4" Oct 03 08:53:55 crc kubenswrapper[4899]: E1003 08:53:54.998678 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-lpwr4" podUID="03abda9b-2057-42c8-8161-4104ecb96027" Oct 03 08:53:55 crc kubenswrapper[4899]: E1003 08:53:55.007483 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-9qcj2" podUID="2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919" Oct 03 08:53:55 crc kubenswrapper[4899]: E1003 08:53:55.023499 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-lxmmw" podUID="7496bf16-1fc1-44ec-b96b-e75e00652634" Oct 03 08:53:55 crc kubenswrapper[4899]: E1003 08:53:55.025235 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-7psz6" podUID="2cbb69db-51ad-471c-be3a-57b9422f11cd" Oct 03 08:53:55 crc kubenswrapper[4899]: 
E1003 08:53:55.037205 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-f5mgb" podUID="837b07a9-5832-4d01-b257-ac3fca82b121" Oct 03 08:53:55 crc kubenswrapper[4899]: E1003 08:53:55.041033 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zvwbw" podUID="6c6cb9a6-eacf-411e-8c19-ac8ee51eced8" Oct 03 08:53:55 crc kubenswrapper[4899]: I1003 08:53:55.337249 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7cfd4b6679-jn88c"] Oct 03 08:53:55 crc kubenswrapper[4899]: I1003 08:53:55.729932 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-f5mgb" event={"ID":"837b07a9-5832-4d01-b257-ac3fca82b121","Type":"ContainerStarted","Data":"932359f57c8c36e715598ce34c0587fdfeb3778d24f91adf54637d671ac4239f"} Oct 03 08:53:55 crc kubenswrapper[4899]: E1003 08:53:55.734847 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:637bb7b9ac308bc1e323391a3593b824f688090a856c83385814c17a571b1eed\\\"\"" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-f5mgb" podUID="837b07a9-5832-4d01-b257-ac3fca82b121" Oct 03 08:53:55 crc kubenswrapper[4899]: I1003 08:53:55.736201 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7cfd4b6679-jn88c" event={"ID":"2bb995f5-5432-40fc-a196-71cac18de666","Type":"ContainerStarted","Data":"21e2d93fb92e52f33524fd3b1d3da3e725e4407687ac2bd107e01595ff92e9a7"} Oct 03 08:53:55 crc kubenswrapper[4899]: I1003 08:53:55.743389 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zvwbw" event={"ID":"6c6cb9a6-eacf-411e-8c19-ac8ee51eced8","Type":"ContainerStarted","Data":"707c25e5464ec578880b75dc5be0dbcb93fac25d1a54f469f2eee9cbb864b9e2"} Oct 03 08:53:55 crc kubenswrapper[4899]: E1003 08:53:55.745568 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:8f5eee2eb7b77432ef1a88ed693ff981514359dfc808581f393bcef252de5cfa\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zvwbw" podUID="6c6cb9a6-eacf-411e-8c19-ac8ee51eced8" Oct 03 08:53:55 crc kubenswrapper[4899]: I1003 08:53:55.748034 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-9qcj2" event={"ID":"2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919","Type":"ContainerStarted","Data":"a38c0d520daf5c74e5139c6f1e74f47bba35106aabc06f51f3f3942642ff87a4"} Oct 03 08:53:55 crc kubenswrapper[4899]: E1003 08:53:55.754136 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:40fb1819b6639807b77ef79448d35f1e4bfc1838a09d4f380e9fa0f755352475\\\"\"" 
pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-9qcj2" podUID="2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919" Oct 03 08:53:55 crc kubenswrapper[4899]: I1003 08:53:55.780562 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-lxmmw" event={"ID":"7496bf16-1fc1-44ec-b96b-e75e00652634","Type":"ContainerStarted","Data":"6e0f3afb730e9bad20afab50813aa6d5a09c39c4b8271ff7bb0fa18a653c5db8"} Oct 03 08:53:55 crc kubenswrapper[4899]: E1003 08:53:55.786172 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:018151bd5ff830ec03c6b8e3d53cfb9456ca6e1e34793bdd4f7edd39a0146fa6\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-lxmmw" podUID="7496bf16-1fc1-44ec-b96b-e75e00652634" Oct 03 08:53:55 crc kubenswrapper[4899]: I1003 08:53:55.792329 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg" event={"ID":"62eda81d-d797-4ed4-9687-9cdc7c49decb","Type":"ContainerStarted","Data":"43a6b62605c27e4c19db8faf64efdef5208da597b376d0f660c34b9c0f0e861e"} Oct 03 08:53:55 crc kubenswrapper[4899]: I1003 08:53:55.801742 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-lpwr4" event={"ID":"03abda9b-2057-42c8-8161-4104ecb96027","Type":"ContainerStarted","Data":"2174c130a2f93f1435cacb76d4b632fec3f297f6f5041d3a64a52621443a877f"} Oct 03 08:53:55 crc kubenswrapper[4899]: I1003 08:53:55.801793 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-lpwr4" event={"ID":"03abda9b-2057-42c8-8161-4104ecb96027","Type":"ContainerStarted","Data":"e7659d834b4bbed9ba998a96543dde7f978ace16e83ac1320a2261b1912b618c"} Oct 03 08:53:55 crc kubenswrapper[4899]: I1003 08:53:55.807612 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-bnb75" event={"ID":"6008780d-5be3-4fda-8526-594566364ae4","Type":"ContainerStarted","Data":"2083142b255bdc3bd9dd156a8cd650846c4ec11b7f042e5c21a432a966adfc44"} Oct 03 08:53:55 crc kubenswrapper[4899]: I1003 08:53:55.812796 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-7psz6" event={"ID":"2cbb69db-51ad-471c-be3a-57b9422f11cd","Type":"ContainerStarted","Data":"77fa259b8a6c82b6a05d25f13c992e6f1c671c0572022058eca402093297dc8a"} Oct 03 08:53:55 crc kubenswrapper[4899]: E1003 08:53:55.844538 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:725da67b3f9cf2758564e0111928cdd570c0f6f1ca34775f159bbe94deb82548\\\"\"" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-bnb75" podUID="6008780d-5be3-4fda-8526-594566364ae4" Oct 03 08:53:55 crc kubenswrapper[4899]: E1003 08:53:55.845843 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:5c6ab93b78bd20eb7f1736751a59c1eb33fb06351339563dbefe49ccaaff6e94\\\"\"" 
pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-7psz6" podUID="2cbb69db-51ad-471c-be3a-57b9422f11cd" Oct 03 08:53:55 crc kubenswrapper[4899]: E1003 08:53:55.845963 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:0daf76cc40ab619ae266b11defcc1b65beb22d859369e7b1b04de9169089a4cb\\\"\"" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-lpwr4" podUID="03abda9b-2057-42c8-8161-4104ecb96027" Oct 03 08:53:55 crc kubenswrapper[4899]: E1003 08:53:55.857910 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"\"" pod="openshift-marketplace/community-operators-gbhrz" podUID="eaeb11f4-25fe-42cc-8bf4-0d6c93803c16" Oct 03 08:53:56 crc kubenswrapper[4899]: I1003 08:53:56.832523 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7cfd4b6679-jn88c" event={"ID":"2bb995f5-5432-40fc-a196-71cac18de666","Type":"ContainerStarted","Data":"eed49fa894c9a8772d703ec724d6a07eef14587c45a520bbc71ecc5c0994116d"} Oct 03 08:53:56 crc kubenswrapper[4899]: E1003 08:53:56.835807 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:018151bd5ff830ec03c6b8e3d53cfb9456ca6e1e34793bdd4f7edd39a0146fa6\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-lxmmw" podUID="7496bf16-1fc1-44ec-b96b-e75e00652634" Oct 03 08:53:56 crc kubenswrapper[4899]: E1003 08:53:56.836138 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:5c6ab93b78bd20eb7f1736751a59c1eb33fb06351339563dbefe49ccaaff6e94\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-7psz6" podUID="2cbb69db-51ad-471c-be3a-57b9422f11cd" Oct 03 08:53:56 crc kubenswrapper[4899]: E1003 08:53:56.836252 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:0daf76cc40ab619ae266b11defcc1b65beb22d859369e7b1b04de9169089a4cb\\\"\"" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-lpwr4" podUID="03abda9b-2057-42c8-8161-4104ecb96027" Oct 03 08:53:56 crc kubenswrapper[4899]: E1003 08:53:56.837020 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:8f5eee2eb7b77432ef1a88ed693ff981514359dfc808581f393bcef252de5cfa\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zvwbw" podUID="6c6cb9a6-eacf-411e-8c19-ac8ee51eced8" Oct 03 08:53:56 crc kubenswrapper[4899]: E1003 08:53:56.837149 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/swift-operator@sha256:637bb7b9ac308bc1e323391a3593b824f688090a856c83385814c17a571b1eed\\\"\"" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-f5mgb" podUID="837b07a9-5832-4d01-b257-ac3fca82b121" Oct 03 08:53:56 crc kubenswrapper[4899]: E1003 08:53:56.837318 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:40fb1819b6639807b77ef79448d35f1e4bfc1838a09d4f380e9fa0f755352475\\\"\"" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-9qcj2" podUID="2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919" Oct 03 08:53:56 crc kubenswrapper[4899]: E1003 08:53:56.837383 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:725da67b3f9cf2758564e0111928cdd570c0f6f1ca34775f159bbe94deb82548\\\"\"" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-bnb75" podUID="6008780d-5be3-4fda-8526-594566364ae4" Oct 03 08:53:58 crc kubenswrapper[4899]: I1003 08:53:58.994550 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-465sx"] Oct 03 08:53:58 crc kubenswrapper[4899]: I1003 08:53:58.996579 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-465sx" Oct 03 08:53:59 crc kubenswrapper[4899]: I1003 08:53:59.003295 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-465sx"] Oct 03 08:53:59 crc kubenswrapper[4899]: I1003 08:53:59.019319 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/721b0e3a-b699-43a6-b9f5-43e5e52b1d7a-utilities\") pod \"redhat-marketplace-465sx\" (UID: \"721b0e3a-b699-43a6-b9f5-43e5e52b1d7a\") " pod="openshift-marketplace/redhat-marketplace-465sx" Oct 03 08:53:59 crc kubenswrapper[4899]: I1003 08:53:59.019538 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/721b0e3a-b699-43a6-b9f5-43e5e52b1d7a-catalog-content\") pod \"redhat-marketplace-465sx\" (UID: \"721b0e3a-b699-43a6-b9f5-43e5e52b1d7a\") " pod="openshift-marketplace/redhat-marketplace-465sx" Oct 03 08:53:59 crc kubenswrapper[4899]: I1003 08:53:59.019708 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rccx7\" (UniqueName: \"kubernetes.io/projected/721b0e3a-b699-43a6-b9f5-43e5e52b1d7a-kube-api-access-rccx7\") pod \"redhat-marketplace-465sx\" (UID: \"721b0e3a-b699-43a6-b9f5-43e5e52b1d7a\") " pod="openshift-marketplace/redhat-marketplace-465sx" Oct 03 08:53:59 crc kubenswrapper[4899]: I1003 08:53:59.121235 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/721b0e3a-b699-43a6-b9f5-43e5e52b1d7a-utilities\") pod \"redhat-marketplace-465sx\" (UID: \"721b0e3a-b699-43a6-b9f5-43e5e52b1d7a\") " pod="openshift-marketplace/redhat-marketplace-465sx" Oct 03 08:53:59 crc kubenswrapper[4899]: I1003 08:53:59.121286 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/721b0e3a-b699-43a6-b9f5-43e5e52b1d7a-catalog-content\") pod \"redhat-marketplace-465sx\" (UID: \"721b0e3a-b699-43a6-b9f5-43e5e52b1d7a\") " pod="openshift-marketplace/redhat-marketplace-465sx" Oct 03 08:53:59 crc kubenswrapper[4899]: I1003 08:53:59.121323 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rccx7\" (UniqueName: \"kubernetes.io/projected/721b0e3a-b699-43a6-b9f5-43e5e52b1d7a-kube-api-access-rccx7\") pod \"redhat-marketplace-465sx\" (UID: \"721b0e3a-b699-43a6-b9f5-43e5e52b1d7a\") " pod="openshift-marketplace/redhat-marketplace-465sx" Oct 03 08:53:59 crc kubenswrapper[4899]: I1003 08:53:59.122335 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/721b0e3a-b699-43a6-b9f5-43e5e52b1d7a-utilities\") pod \"redhat-marketplace-465sx\" (UID: \"721b0e3a-b699-43a6-b9f5-43e5e52b1d7a\") " pod="openshift-marketplace/redhat-marketplace-465sx" Oct 03 08:53:59 crc kubenswrapper[4899]: I1003 08:53:59.123630 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/721b0e3a-b699-43a6-b9f5-43e5e52b1d7a-catalog-content\") pod \"redhat-marketplace-465sx\" (UID: \"721b0e3a-b699-43a6-b9f5-43e5e52b1d7a\") " pod="openshift-marketplace/redhat-marketplace-465sx" Oct 03 08:53:59 crc kubenswrapper[4899]: I1003 08:53:59.143977 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rccx7\" (UniqueName: \"kubernetes.io/projected/721b0e3a-b699-43a6-b9f5-43e5e52b1d7a-kube-api-access-rccx7\") pod \"redhat-marketplace-465sx\" (UID: \"721b0e3a-b699-43a6-b9f5-43e5e52b1d7a\") " pod="openshift-marketplace/redhat-marketplace-465sx" Oct 03 08:53:59 crc kubenswrapper[4899]: I1003 08:53:59.319033 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-465sx" Oct 03 08:54:00 crc kubenswrapper[4899]: I1003 08:54:00.807765 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rp2xc"] Oct 03 08:54:00 crc kubenswrapper[4899]: I1003 08:54:00.817149 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rp2xc" Oct 03 08:54:00 crc kubenswrapper[4899]: I1003 08:54:00.817643 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rp2xc"] Oct 03 08:54:00 crc kubenswrapper[4899]: I1003 08:54:00.943591 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fdc02e4-5ae5-4609-9946-b4bc62fa5687-catalog-content\") pod \"certified-operators-rp2xc\" (UID: \"2fdc02e4-5ae5-4609-9946-b4bc62fa5687\") " pod="openshift-marketplace/certified-operators-rp2xc" Oct 03 08:54:00 crc kubenswrapper[4899]: I1003 08:54:00.943694 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fdc02e4-5ae5-4609-9946-b4bc62fa5687-utilities\") pod \"certified-operators-rp2xc\" (UID: \"2fdc02e4-5ae5-4609-9946-b4bc62fa5687\") " pod="openshift-marketplace/certified-operators-rp2xc" Oct 03 08:54:00 crc kubenswrapper[4899]: I1003 08:54:00.943769 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khpbx\" (UniqueName: \"kubernetes.io/projected/2fdc02e4-5ae5-4609-9946-b4bc62fa5687-kube-api-access-khpbx\") pod \"certified-operators-rp2xc\" (UID: \"2fdc02e4-5ae5-4609-9946-b4bc62fa5687\") " pod="openshift-marketplace/certified-operators-rp2xc" Oct 03 08:54:01 crc kubenswrapper[4899]: I1003 08:54:01.044623 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fdc02e4-5ae5-4609-9946-b4bc62fa5687-utilities\") pod \"certified-operators-rp2xc\" (UID: \"2fdc02e4-5ae5-4609-9946-b4bc62fa5687\") " pod="openshift-marketplace/certified-operators-rp2xc" Oct 03 08:54:01 crc kubenswrapper[4899]: I1003 08:54:01.044705 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khpbx\" (UniqueName: \"kubernetes.io/projected/2fdc02e4-5ae5-4609-9946-b4bc62fa5687-kube-api-access-khpbx\") pod \"certified-operators-rp2xc\" (UID: \"2fdc02e4-5ae5-4609-9946-b4bc62fa5687\") " pod="openshift-marketplace/certified-operators-rp2xc" Oct 03 08:54:01 crc kubenswrapper[4899]: I1003 08:54:01.044746 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fdc02e4-5ae5-4609-9946-b4bc62fa5687-catalog-content\") pod \"certified-operators-rp2xc\" (UID: \"2fdc02e4-5ae5-4609-9946-b4bc62fa5687\") " pod="openshift-marketplace/certified-operators-rp2xc" Oct 03 08:54:01 crc kubenswrapper[4899]: I1003 08:54:01.045525 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fdc02e4-5ae5-4609-9946-b4bc62fa5687-catalog-content\") pod \"certified-operators-rp2xc\" (UID: \"2fdc02e4-5ae5-4609-9946-b4bc62fa5687\") " pod="openshift-marketplace/certified-operators-rp2xc" Oct 03 08:54:01 crc kubenswrapper[4899]: I1003 08:54:01.045662 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fdc02e4-5ae5-4609-9946-b4bc62fa5687-utilities\") pod \"certified-operators-rp2xc\" (UID: \"2fdc02e4-5ae5-4609-9946-b4bc62fa5687\") " pod="openshift-marketplace/certified-operators-rp2xc" Oct 03 08:54:01 crc kubenswrapper[4899]: I1003 08:54:01.081411 4899 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-khpbx\" (UniqueName: \"kubernetes.io/projected/2fdc02e4-5ae5-4609-9946-b4bc62fa5687-kube-api-access-khpbx\") pod \"certified-operators-rp2xc\" (UID: \"2fdc02e4-5ae5-4609-9946-b4bc62fa5687\") " pod="openshift-marketplace/certified-operators-rp2xc" Oct 03 08:54:01 crc kubenswrapper[4899]: I1003 08:54:01.139060 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rp2xc" Oct 03 08:54:01 crc kubenswrapper[4899]: I1003 08:54:01.874881 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7cfd4b6679-jn88c" event={"ID":"2bb995f5-5432-40fc-a196-71cac18de666","Type":"ContainerStarted","Data":"efddf43b6fab14886f54e2279c7bde47adbbddad037973dafe950c99ac011394"} Oct 03 08:54:01 crc kubenswrapper[4899]: I1003 08:54:01.876160 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-7cfd4b6679-jn88c" Oct 03 08:54:01 crc kubenswrapper[4899]: I1003 08:54:01.885926 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-7cfd4b6679-jn88c" Oct 03 08:54:01 crc kubenswrapper[4899]: I1003 08:54:01.896576 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-7cfd4b6679-jn88c" podStartSLOduration=9.896559952 podStartE2EDuration="9.896559952s" podCreationTimestamp="2025-10-03 08:53:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:54:01.896037536 +0000 UTC m=+816.003522509" watchObservedRunningTime="2025-10-03 08:54:01.896559952 +0000 UTC m=+816.004044905" Oct 03 08:54:05 crc kubenswrapper[4899]: I1003 08:54:05.197132 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tjx4z"] Oct 03 08:54:05 crc kubenswrapper[4899]: I1003 08:54:05.201372 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tjx4z" Oct 03 08:54:05 crc kubenswrapper[4899]: I1003 08:54:05.220589 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3d43d09-2fb5-4994-b485-fc405297d47b-catalog-content\") pod \"redhat-operators-tjx4z\" (UID: \"f3d43d09-2fb5-4994-b485-fc405297d47b\") " pod="openshift-marketplace/redhat-operators-tjx4z" Oct 03 08:54:05 crc kubenswrapper[4899]: I1003 08:54:05.221073 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5h5v\" (UniqueName: \"kubernetes.io/projected/f3d43d09-2fb5-4994-b485-fc405297d47b-kube-api-access-b5h5v\") pod \"redhat-operators-tjx4z\" (UID: \"f3d43d09-2fb5-4994-b485-fc405297d47b\") " pod="openshift-marketplace/redhat-operators-tjx4z" Oct 03 08:54:05 crc kubenswrapper[4899]: I1003 08:54:05.221132 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3d43d09-2fb5-4994-b485-fc405297d47b-utilities\") pod \"redhat-operators-tjx4z\" (UID: \"f3d43d09-2fb5-4994-b485-fc405297d47b\") " pod="openshift-marketplace/redhat-operators-tjx4z" Oct 03 08:54:05 crc kubenswrapper[4899]: I1003 08:54:05.234404 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tjx4z"] Oct 03 08:54:05 crc kubenswrapper[4899]: I1003 08:54:05.318412 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-465sx"] Oct 03 08:54:05 crc kubenswrapper[4899]: I1003 08:54:05.321967 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3d43d09-2fb5-4994-b485-fc405297d47b-catalog-content\") pod \"redhat-operators-tjx4z\" (UID: \"f3d43d09-2fb5-4994-b485-fc405297d47b\") " pod="openshift-marketplace/redhat-operators-tjx4z" Oct 03 08:54:05 crc kubenswrapper[4899]: I1003 08:54:05.323966 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5h5v\" (UniqueName: \"kubernetes.io/projected/f3d43d09-2fb5-4994-b485-fc405297d47b-kube-api-access-b5h5v\") pod \"redhat-operators-tjx4z\" (UID: \"f3d43d09-2fb5-4994-b485-fc405297d47b\") " pod="openshift-marketplace/redhat-operators-tjx4z" Oct 03 08:54:05 crc kubenswrapper[4899]: I1003 08:54:05.324193 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3d43d09-2fb5-4994-b485-fc405297d47b-utilities\") pod \"redhat-operators-tjx4z\" (UID: \"f3d43d09-2fb5-4994-b485-fc405297d47b\") " pod="openshift-marketplace/redhat-operators-tjx4z" Oct 03 08:54:05 crc kubenswrapper[4899]: I1003 08:54:05.324044 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3d43d09-2fb5-4994-b485-fc405297d47b-catalog-content\") pod \"redhat-operators-tjx4z\" (UID: \"f3d43d09-2fb5-4994-b485-fc405297d47b\") " pod="openshift-marketplace/redhat-operators-tjx4z" Oct 03 08:54:05 crc kubenswrapper[4899]: I1003 08:54:05.324725 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3d43d09-2fb5-4994-b485-fc405297d47b-utilities\") pod \"redhat-operators-tjx4z\" (UID: \"f3d43d09-2fb5-4994-b485-fc405297d47b\") " 
pod="openshift-marketplace/redhat-operators-tjx4z" Oct 03 08:54:05 crc kubenswrapper[4899]: I1003 08:54:05.360643 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5h5v\" (UniqueName: \"kubernetes.io/projected/f3d43d09-2fb5-4994-b485-fc405297d47b-kube-api-access-b5h5v\") pod \"redhat-operators-tjx4z\" (UID: \"f3d43d09-2fb5-4994-b485-fc405297d47b\") " pod="openshift-marketplace/redhat-operators-tjx4z" Oct 03 08:54:05 crc kubenswrapper[4899]: I1003 08:54:05.387906 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rp2xc"] Oct 03 08:54:05 crc kubenswrapper[4899]: W1003 08:54:05.430070 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2fdc02e4_5ae5_4609_9946_b4bc62fa5687.slice/crio-2b125d8750b07725fe3397ef3eb581a65f4b0e212dbf88558ce29ac8e56cae00 WatchSource:0}: Error finding container 2b125d8750b07725fe3397ef3eb581a65f4b0e212dbf88558ce29ac8e56cae00: Status 404 returned error can't find the container with id 2b125d8750b07725fe3397ef3eb581a65f4b0e212dbf88558ce29ac8e56cae00 Oct 03 08:54:05 crc kubenswrapper[4899]: I1003 08:54:05.561221 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tjx4z" Oct 03 08:54:05 crc kubenswrapper[4899]: I1003 08:54:05.966538 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-jqgrz" event={"ID":"ba75f5b9-b92b-4cd7-98c9-1bcf6b772940","Type":"ContainerStarted","Data":"58aec529a98d6fad05e4faca528e6e601c082eb88488bf8f787ba997b1253a30"} Oct 03 08:54:05 crc kubenswrapper[4899]: I1003 08:54:05.995986 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-n8q4j" event={"ID":"5c4913b3-fd20-4eee-99df-1900f5486f51","Type":"ContainerStarted","Data":"483c34bff0a0495cab57e1ae780e4a788a5e59c75d5a15c459dd1cc25a641959"} Oct 03 08:54:06 crc kubenswrapper[4899]: I1003 08:54:06.005681 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-trrmd" event={"ID":"110672ad-3117-4a7c-8614-f12ab626e28c","Type":"ContainerStarted","Data":"db2f1fadb079b98bf496eb96a33da4c70d4e78d5b1f02d86167f53d1ed7d3eb7"} Oct 03 08:54:06 crc kubenswrapper[4899]: I1003 08:54:06.061260 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-qxxgx" event={"ID":"d8d28854-8e4e-47cd-847a-c58811fb4f91","Type":"ContainerStarted","Data":"8cd947ad5423a4939b93b99c60f5c9b907e0e16c95182e7f4642937964f67876"} Oct 03 08:54:06 crc kubenswrapper[4899]: I1003 08:54:06.069451 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-n8q4j" podStartSLOduration=3.7774268859999998 podStartE2EDuration="14.06942952s" podCreationTimestamp="2025-10-03 08:53:52 +0000 UTC" firstStartedPulling="2025-10-03 08:53:54.546959152 +0000 UTC m=+808.654444105" lastFinishedPulling="2025-10-03 08:54:04.838961786 +0000 UTC m=+818.946446739" observedRunningTime="2025-10-03 08:54:06.042711109 +0000 UTC m=+820.150196062" watchObservedRunningTime="2025-10-03 08:54:06.06942952 +0000 UTC m=+820.176914473" Oct 03 08:54:06 crc kubenswrapper[4899]: I1003 08:54:06.107210 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-hwhx2" event={"ID":"ed44541c-bb31-43bb-92eb-298b01820505","Type":"ContainerStarted","Data":"34859b57a2832e8481f381cc569eb430aa481ffc08e29b033313c76748ae7c5a"} Oct 03 08:54:06 crc kubenswrapper[4899]: I1003 08:54:06.132180 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-7zf22" event={"ID":"de3ec379-fb48-440a-8502-3650db78804a","Type":"ContainerStarted","Data":"79e153a3d8d44d4f8c6a287b224bde8969e13e1e41a05daa89ca028586e67b65"} Oct 03 08:54:06 crc kubenswrapper[4899]: I1003 08:54:06.191264 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-hz7qr" event={"ID":"c90f297d-af70-423f-b34d-8b3599ba12eb","Type":"ContainerStarted","Data":"042cc9a8f712a414523eab51e8ad273f99a880b2c04c7527ffe981c01f5c174c"} Oct 03 08:54:06 crc kubenswrapper[4899]: I1003 08:54:06.206073 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-599898f689-5c9bs" event={"ID":"b14b68d1-483a-419c-b696-a915c6d25d09","Type":"ContainerStarted","Data":"5d7a6930d5c7c2e47f8577a60122259c16faedaea93bc659215702d97e5cb910"} Oct 03 08:54:06 crc kubenswrapper[4899]: I1003 08:54:06.237175 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-k8snv" event={"ID":"a8895f13-915f-45f7-8156-43a7f11ac9bb","Type":"ContainerStarted","Data":"f74b19fc23c235ee66431cbb3415043e2fa608655aa87be11b664b227ced1778"} Oct 03 08:54:06 crc kubenswrapper[4899]: I1003 08:54:06.271114 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-bj2gj" event={"ID":"3e876467-fd1a-4b4c-b62b-d1641400a756","Type":"ContainerStarted","Data":"44e4a194a4095b440a576500022d34319384e861ba3faada46482613068a7837"} Oct 03 08:54:06 crc kubenswrapper[4899]: I1003 08:54:06.279133 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-dglwx" event={"ID":"f500aadc-0447-4d26-9ab4-83f64b084a89","Type":"ContainerStarted","Data":"16e9bbc08bf1d05d44ad13f4c85635de3c0f185f53fe36c8a8aaab2a048608d7"} Oct 03 08:54:06 crc kubenswrapper[4899]: I1003 08:54:06.291472 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-hpqjs" event={"ID":"39eb57f7-d61f-4445-aea3-6b96585c4f76","Type":"ContainerStarted","Data":"f7bcf35fb8ccaac9c4142eed4f87ca7bd692cc0fca9a93c452f6c37d9fd5e68b"} Oct 03 08:54:06 crc kubenswrapper[4899]: I1003 08:54:06.316571 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-465sx" event={"ID":"721b0e3a-b699-43a6-b9f5-43e5e52b1d7a","Type":"ContainerStarted","Data":"3fdbf74abf04e267a3da2725fec772a6703a152cc734cb7d7e7d2480b26e7023"} Oct 03 08:54:06 crc kubenswrapper[4899]: I1003 08:54:06.344125 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-78vk7" event={"ID":"20ecf87a-f08f-4d2f-92fd-ba14a9a9e5b2","Type":"ContainerStarted","Data":"458cbde989d8acab2d7f912feb97dbd8650644d7bade787f3e3d466288cea682"} Oct 03 08:54:06 crc kubenswrapper[4899]: I1003 08:54:06.362994 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-mhkqv" 
event={"ID":"b32f9b3e-72a8-4229-9715-8fdd98877a04","Type":"ContainerStarted","Data":"cf2d7e0a0f9ba4594025898170ad4cace2b4127d7712fd68cf825b3c21245236"} Oct 03 08:54:06 crc kubenswrapper[4899]: I1003 08:54:06.387971 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rp2xc" event={"ID":"2fdc02e4-5ae5-4609-9946-b4bc62fa5687","Type":"ContainerStarted","Data":"dce067edaf2417a8cd56ca66e7d60207bdd46ab3d054dafd5d7a401d0ecd1850"} Oct 03 08:54:06 crc kubenswrapper[4899]: I1003 08:54:06.388010 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rp2xc" event={"ID":"2fdc02e4-5ae5-4609-9946-b4bc62fa5687","Type":"ContainerStarted","Data":"2b125d8750b07725fe3397ef3eb581a65f4b0e212dbf88558ce29ac8e56cae00"} Oct 03 08:54:06 crc kubenswrapper[4899]: I1003 08:54:06.418130 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg" event={"ID":"62eda81d-d797-4ed4-9687-9cdc7c49decb","Type":"ContainerStarted","Data":"085e6ec29c0b52ee07ea276e26e0c8fcd5066693992486bc1ee48ad06077daad"} Oct 03 08:54:06 crc kubenswrapper[4899]: I1003 08:54:06.420646 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tjx4z"] Oct 03 08:54:06 crc kubenswrapper[4899]: W1003 08:54:06.543935 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf3d43d09_2fb5_4994_b485_fc405297d47b.slice/crio-9166497e92b0503d789b1c57760ed99a903ffde7572e201ac9b358461038b415 WatchSource:0}: Error finding container 9166497e92b0503d789b1c57760ed99a903ffde7572e201ac9b358461038b415: Status 404 returned error can't find the container with id 9166497e92b0503d789b1c57760ed99a903ffde7572e201ac9b358461038b415 Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.427224 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg" event={"ID":"62eda81d-d797-4ed4-9687-9cdc7c49decb","Type":"ContainerStarted","Data":"4ad826c1ae43f26031581d569f4565c202d306b887928cd0d33c03354b016b77"} Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.427564 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg" Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.430021 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-dglwx" event={"ID":"f500aadc-0447-4d26-9ab4-83f64b084a89","Type":"ContainerStarted","Data":"27e1ff3cf9442ba70e89118d424d8746efec712f17785c6ff1b5d976d299500e"} Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.430134 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-dglwx" Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.431848 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-hz7qr" event={"ID":"c90f297d-af70-423f-b34d-8b3599ba12eb","Type":"ContainerStarted","Data":"289ab15dbcfe0dd57882386810d70217957dad9dbab96af5625ac56be21ce83c"} Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.432139 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-hz7qr" Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.435276 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-qxxgx" event={"ID":"d8d28854-8e4e-47cd-847a-c58811fb4f91","Type":"ContainerStarted","Data":"e56897178fd61585439192c3da4fddf1ad237fe0fd7f0f6dd27bc4f98f275221"} Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.435333 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-qxxgx" Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.437679 4899 generic.go:334] "Generic (PLEG): container finished" podID="2fdc02e4-5ae5-4609-9946-b4bc62fa5687" containerID="dce067edaf2417a8cd56ca66e7d60207bdd46ab3d054dafd5d7a401d0ecd1850" exitCode=0 Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.437776 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rp2xc" event={"ID":"2fdc02e4-5ae5-4609-9946-b4bc62fa5687","Type":"ContainerDied","Data":"dce067edaf2417a8cd56ca66e7d60207bdd46ab3d054dafd5d7a401d0ecd1850"} Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.437806 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rp2xc" event={"ID":"2fdc02e4-5ae5-4609-9946-b4bc62fa5687","Type":"ContainerStarted","Data":"f9885125a6f6f811401e31f990b7875027cd7c2006f42832e20e6b86a99f35f6"} Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.440444 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-mhkqv" event={"ID":"b32f9b3e-72a8-4229-9715-8fdd98877a04","Type":"ContainerStarted","Data":"d40312c9583ea03a7dc24936c523474bc88adddbc2a2a01be0ce30779d30f82d"} Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.440864 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-mhkqv" Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.446269 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-hpqjs" event={"ID":"39eb57f7-d61f-4445-aea3-6b96585c4f76","Type":"ContainerStarted","Data":"3ccc465590a8b0aa0d21d14310df151f6e6ee2c0cbd346a6d9b8b10e738eb0ce"} Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.446743 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-hpqjs" Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.448706 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-hwhx2" event={"ID":"ed44541c-bb31-43bb-92eb-298b01820505","Type":"ContainerStarted","Data":"f89fc3ceb819c6bd76e6dafd71035fa87421e7ee23a2877c7b695a3dab169937"} Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.449136 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-hwhx2" Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.453955 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-599898f689-5c9bs" 
event={"ID":"b14b68d1-483a-419c-b696-a915c6d25d09","Type":"ContainerStarted","Data":"4616c2a67de75facac45f31987eaa7b8b78cc7707df46d70c5fe5598646b0782"} Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.454152 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-599898f689-5c9bs" Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.457029 4899 generic.go:334] "Generic (PLEG): container finished" podID="f3d43d09-2fb5-4994-b485-fc405297d47b" containerID="70bac5f66c6062ce580e768999364aeb07d3fa36d1d095cffc5cdc3228527b7f" exitCode=0 Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.457092 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tjx4z" event={"ID":"f3d43d09-2fb5-4994-b485-fc405297d47b","Type":"ContainerDied","Data":"70bac5f66c6062ce580e768999364aeb07d3fa36d1d095cffc5cdc3228527b7f"} Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.457113 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tjx4z" event={"ID":"f3d43d09-2fb5-4994-b485-fc405297d47b","Type":"ContainerStarted","Data":"9166497e92b0503d789b1c57760ed99a903ffde7572e201ac9b358461038b415"} Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.457250 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg" podStartSLOduration=5.280261649 podStartE2EDuration="15.457223919s" podCreationTimestamp="2025-10-03 08:53:52 +0000 UTC" firstStartedPulling="2025-10-03 08:53:54.685530016 +0000 UTC m=+808.793014969" lastFinishedPulling="2025-10-03 08:54:04.862492286 +0000 UTC m=+818.969977239" observedRunningTime="2025-10-03 08:54:07.450861539 +0000 UTC m=+821.558346492" watchObservedRunningTime="2025-10-03 08:54:07.457223919 +0000 UTC m=+821.564708872" Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.464805 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-jqgrz" event={"ID":"ba75f5b9-b92b-4cd7-98c9-1bcf6b772940","Type":"ContainerStarted","Data":"94eb0c65fe027feb49283fe8b0d4b0d12408723123512c36ecd71fbc3f4a6d9e"} Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.465512 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-jqgrz" Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.476630 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-7zf22" event={"ID":"de3ec379-fb48-440a-8502-3650db78804a","Type":"ContainerStarted","Data":"1de8d40569ba02083618bba314e83d1bd5f9a2aba881c855c66b905a56445d8c"} Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.477028 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-7zf22" Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.478270 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-hz7qr" podStartSLOduration=5.087645016 podStartE2EDuration="16.478258132s" podCreationTimestamp="2025-10-03 08:53:51 +0000 UTC" firstStartedPulling="2025-10-03 08:53:53.466485131 +0000 UTC m=+807.573970084" lastFinishedPulling="2025-10-03 08:54:04.857098257 +0000 UTC m=+818.964583200" 
observedRunningTime="2025-10-03 08:54:07.476347042 +0000 UTC m=+821.583831995" watchObservedRunningTime="2025-10-03 08:54:07.478258132 +0000 UTC m=+821.585743085" Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.491311 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-78vk7" event={"ID":"20ecf87a-f08f-4d2f-92fd-ba14a9a9e5b2","Type":"ContainerStarted","Data":"b73581b0235ddd7a05dfe941f88b35bc80dfbfe4c7d723e85d057d146afdc38d"} Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.491360 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-78vk7" Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.492780 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-trrmd" event={"ID":"110672ad-3117-4a7c-8614-f12ab626e28c","Type":"ContainerStarted","Data":"7ba253ce5066234e0f70601e304d22d8f45f06f306c905813589f37bcdf0bcf5"} Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.494411 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-trrmd" Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.506469 4899 generic.go:334] "Generic (PLEG): container finished" podID="721b0e3a-b699-43a6-b9f5-43e5e52b1d7a" containerID="373e0ca145b58299804fbfc46040f60e02903bb90bd441965f8b690328054f90" exitCode=0 Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.506508 4899 generic.go:334] "Generic (PLEG): container finished" podID="721b0e3a-b699-43a6-b9f5-43e5e52b1d7a" containerID="1a93ce15f3289958d00764b7f4c57d5981facd300b0085163de3ddd5cfa46690" exitCode=0 Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.506590 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-465sx" event={"ID":"721b0e3a-b699-43a6-b9f5-43e5e52b1d7a","Type":"ContainerDied","Data":"373e0ca145b58299804fbfc46040f60e02903bb90bd441965f8b690328054f90"} Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.506622 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-465sx" event={"ID":"721b0e3a-b699-43a6-b9f5-43e5e52b1d7a","Type":"ContainerDied","Data":"1a93ce15f3289958d00764b7f4c57d5981facd300b0085163de3ddd5cfa46690"} Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.517879 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-k8snv" event={"ID":"a8895f13-915f-45f7-8156-43a7f11ac9bb","Type":"ContainerStarted","Data":"428cc5ab7f747b7b06c6cc562dd0a36e7444fc269d03a6fefaa887578281cd74"} Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.517952 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-k8snv" Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.521567 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-bj2gj" event={"ID":"3e876467-fd1a-4b4c-b62b-d1641400a756","Type":"ContainerStarted","Data":"c2b53b23d1b7033690a5a70eb48c23f6e5c86b85b649806b24b1f3cd8840e0ca"} Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.521661 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-bj2gj" Oct 03 
08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.529319 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-mhkqv" podStartSLOduration=5.897043969 podStartE2EDuration="16.529305969s" podCreationTimestamp="2025-10-03 08:53:51 +0000 UTC" firstStartedPulling="2025-10-03 08:53:54.206739867 +0000 UTC m=+808.314224820" lastFinishedPulling="2025-10-03 08:54:04.839001867 +0000 UTC m=+818.946486820" observedRunningTime="2025-10-03 08:54:07.521966338 +0000 UTC m=+821.629451291" watchObservedRunningTime="2025-10-03 08:54:07.529305969 +0000 UTC m=+821.636790922" Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.549386 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-hpqjs" podStartSLOduration=4.879317861 podStartE2EDuration="15.549365602s" podCreationTimestamp="2025-10-03 08:53:52 +0000 UTC" firstStartedPulling="2025-10-03 08:53:54.187671216 +0000 UTC m=+808.295156169" lastFinishedPulling="2025-10-03 08:54:04.857718967 +0000 UTC m=+818.965203910" observedRunningTime="2025-10-03 08:54:07.545407597 +0000 UTC m=+821.652892550" watchObservedRunningTime="2025-10-03 08:54:07.549365602 +0000 UTC m=+821.656850555" Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.587039 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-dglwx" podStartSLOduration=4.908335475 podStartE2EDuration="15.587021108s" podCreationTimestamp="2025-10-03 08:53:52 +0000 UTC" firstStartedPulling="2025-10-03 08:53:54.178733404 +0000 UTC m=+808.286218357" lastFinishedPulling="2025-10-03 08:54:04.857419037 +0000 UTC m=+818.964903990" observedRunningTime="2025-10-03 08:54:07.581215384 +0000 UTC m=+821.688700337" watchObservedRunningTime="2025-10-03 08:54:07.587021108 +0000 UTC m=+821.694506061" Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.602647 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-hwhx2" podStartSLOduration=4.869850664 podStartE2EDuration="15.60263434s" podCreationTimestamp="2025-10-03 08:53:52 +0000 UTC" firstStartedPulling="2025-10-03 08:53:54.127544093 +0000 UTC m=+808.235029046" lastFinishedPulling="2025-10-03 08:54:04.860327769 +0000 UTC m=+818.967812722" observedRunningTime="2025-10-03 08:54:07.602033951 +0000 UTC m=+821.709518904" watchObservedRunningTime="2025-10-03 08:54:07.60263434 +0000 UTC m=+821.710119293" Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.621130 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-qxxgx" podStartSLOduration=4.922294464 podStartE2EDuration="15.621108561s" podCreationTimestamp="2025-10-03 08:53:52 +0000 UTC" firstStartedPulling="2025-10-03 08:53:54.164183076 +0000 UTC m=+808.271668029" lastFinishedPulling="2025-10-03 08:54:04.862997173 +0000 UTC m=+818.970482126" observedRunningTime="2025-10-03 08:54:07.620359327 +0000 UTC m=+821.727844270" watchObservedRunningTime="2025-10-03 08:54:07.621108561 +0000 UTC m=+821.728593514" Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.637683 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-78vk7" podStartSLOduration=5.326396982 podStartE2EDuration="15.637656832s" 
podCreationTimestamp="2025-10-03 08:53:52 +0000 UTC" firstStartedPulling="2025-10-03 08:53:54.546538089 +0000 UTC m=+808.654023042" lastFinishedPulling="2025-10-03 08:54:04.857797939 +0000 UTC m=+818.965282892" observedRunningTime="2025-10-03 08:54:07.636369662 +0000 UTC m=+821.743854615" watchObservedRunningTime="2025-10-03 08:54:07.637656832 +0000 UTC m=+821.745141785" Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.660596 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-599898f689-5c9bs" podStartSLOduration=5.98883913 podStartE2EDuration="16.660577474s" podCreationTimestamp="2025-10-03 08:53:51 +0000 UTC" firstStartedPulling="2025-10-03 08:53:54.185144226 +0000 UTC m=+808.292629179" lastFinishedPulling="2025-10-03 08:54:04.85688257 +0000 UTC m=+818.964367523" observedRunningTime="2025-10-03 08:54:07.655721851 +0000 UTC m=+821.763206804" watchObservedRunningTime="2025-10-03 08:54:07.660577474 +0000 UTC m=+821.768062427" Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.680923 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-k8snv" podStartSLOduration=5.046134505 podStartE2EDuration="15.680870013s" podCreationTimestamp="2025-10-03 08:53:52 +0000 UTC" firstStartedPulling="2025-10-03 08:53:54.207140759 +0000 UTC m=+808.314625712" lastFinishedPulling="2025-10-03 08:54:04.841876267 +0000 UTC m=+818.949361220" observedRunningTime="2025-10-03 08:54:07.677716684 +0000 UTC m=+821.785201637" watchObservedRunningTime="2025-10-03 08:54:07.680870013 +0000 UTC m=+821.788354966" Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.698846 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-trrmd" podStartSLOduration=5.399268516 podStartE2EDuration="15.698827559s" podCreationTimestamp="2025-10-03 08:53:52 +0000 UTC" firstStartedPulling="2025-10-03 08:53:54.549608695 +0000 UTC m=+808.657093648" lastFinishedPulling="2025-10-03 08:54:04.849167738 +0000 UTC m=+818.956652691" observedRunningTime="2025-10-03 08:54:07.694716809 +0000 UTC m=+821.802201762" watchObservedRunningTime="2025-10-03 08:54:07.698827559 +0000 UTC m=+821.806312512" Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.769361 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-jqgrz" podStartSLOduration=5.6719686 podStartE2EDuration="16.76934389s" podCreationTimestamp="2025-10-03 08:53:51 +0000 UTC" firstStartedPulling="2025-10-03 08:53:53.741624247 +0000 UTC m=+807.849109200" lastFinishedPulling="2025-10-03 08:54:04.838999527 +0000 UTC m=+818.946484490" observedRunningTime="2025-10-03 08:54:07.769060431 +0000 UTC m=+821.876545394" watchObservedRunningTime="2025-10-03 08:54:07.76934389 +0000 UTC m=+821.876828843" Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.789442 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-7zf22" podStartSLOduration=5.210365566 podStartE2EDuration="15.789417412s" podCreationTimestamp="2025-10-03 08:53:52 +0000 UTC" firstStartedPulling="2025-10-03 08:53:54.277326049 +0000 UTC m=+808.384811002" lastFinishedPulling="2025-10-03 08:54:04.856377884 +0000 UTC m=+818.963862848" observedRunningTime="2025-10-03 08:54:07.78584474 +0000 UTC 
m=+821.893329693" watchObservedRunningTime="2025-10-03 08:54:07.789417412 +0000 UTC m=+821.896902365" Oct 03 08:54:07 crc kubenswrapper[4899]: I1003 08:54:07.805234 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-bj2gj" podStartSLOduration=5.426100335 podStartE2EDuration="16.805216429s" podCreationTimestamp="2025-10-03 08:53:51 +0000 UTC" firstStartedPulling="2025-10-03 08:53:53.482120224 +0000 UTC m=+807.589605177" lastFinishedPulling="2025-10-03 08:54:04.861236318 +0000 UTC m=+818.968721271" observedRunningTime="2025-10-03 08:54:07.801113731 +0000 UTC m=+821.908598684" watchObservedRunningTime="2025-10-03 08:54:07.805216429 +0000 UTC m=+821.912701382" Oct 03 08:54:08 crc kubenswrapper[4899]: I1003 08:54:08.528345 4899 generic.go:334] "Generic (PLEG): container finished" podID="2fdc02e4-5ae5-4609-9946-b4bc62fa5687" containerID="f9885125a6f6f811401e31f990b7875027cd7c2006f42832e20e6b86a99f35f6" exitCode=0 Oct 03 08:54:08 crc kubenswrapper[4899]: I1003 08:54:08.540743 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rp2xc" event={"ID":"2fdc02e4-5ae5-4609-9946-b4bc62fa5687","Type":"ContainerDied","Data":"f9885125a6f6f811401e31f990b7875027cd7c2006f42832e20e6b86a99f35f6"} Oct 03 08:54:12 crc kubenswrapper[4899]: I1003 08:54:12.319854 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-hz7qr" Oct 03 08:54:12 crc kubenswrapper[4899]: I1003 08:54:12.384949 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-bj2gj" Oct 03 08:54:12 crc kubenswrapper[4899]: I1003 08:54:12.431908 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-jqgrz" Oct 03 08:54:12 crc kubenswrapper[4899]: I1003 08:54:12.465817 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-mhkqv" Oct 03 08:54:12 crc kubenswrapper[4899]: I1003 08:54:12.514268 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-599898f689-5c9bs" Oct 03 08:54:12 crc kubenswrapper[4899]: I1003 08:54:12.517385 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-k8snv" Oct 03 08:54:12 crc kubenswrapper[4899]: I1003 08:54:12.571046 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gbhrz" event={"ID":"eaeb11f4-25fe-42cc-8bf4-0d6c93803c16","Type":"ContainerStarted","Data":"2b9fb7345e49f8df16445b980a4cf8cd676061d063b9774d2706e7e55d814ba7"} Oct 03 08:54:12 crc kubenswrapper[4899]: I1003 08:54:12.574777 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-465sx" event={"ID":"721b0e3a-b699-43a6-b9f5-43e5e52b1d7a","Type":"ContainerStarted","Data":"31b3eeea9157431ad05c2219f921c9b17d14b8a4d82516a68f139c3cd7a97092"} Oct 03 08:54:12 crc kubenswrapper[4899]: I1003 08:54:12.587261 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tjx4z" 
event={"ID":"f3d43d09-2fb5-4994-b485-fc405297d47b","Type":"ContainerStarted","Data":"3b9f076fb3afe15d2debdd6cd60405d49e751d70e5978878a831241fc5d123af"} Oct 03 08:54:12 crc kubenswrapper[4899]: I1003 08:54:12.591031 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rp2xc" event={"ID":"2fdc02e4-5ae5-4609-9946-b4bc62fa5687","Type":"ContainerStarted","Data":"925c561cc4fa89740e0e830780edf5fe4c56cb33b556208a24bd8794e7cedf9b"} Oct 03 08:54:12 crc kubenswrapper[4899]: I1003 08:54:12.603676 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gbhrz" podStartSLOduration=3.375029196 podStartE2EDuration="21.603654159s" podCreationTimestamp="2025-10-03 08:53:51 +0000 UTC" firstStartedPulling="2025-10-03 08:53:53.601253886 +0000 UTC m=+807.708738839" lastFinishedPulling="2025-10-03 08:54:11.829878849 +0000 UTC m=+825.937363802" observedRunningTime="2025-10-03 08:54:12.596561875 +0000 UTC m=+826.704046828" watchObservedRunningTime="2025-10-03 08:54:12.603654159 +0000 UTC m=+826.711139112" Oct 03 08:54:12 crc kubenswrapper[4899]: I1003 08:54:12.635331 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rp2xc" podStartSLOduration=7.290209138 podStartE2EDuration="12.635309956s" podCreationTimestamp="2025-10-03 08:54:00 +0000 UTC" firstStartedPulling="2025-10-03 08:54:06.505102001 +0000 UTC m=+820.612586954" lastFinishedPulling="2025-10-03 08:54:11.850202819 +0000 UTC m=+825.957687772" observedRunningTime="2025-10-03 08:54:12.634743818 +0000 UTC m=+826.742228771" watchObservedRunningTime="2025-10-03 08:54:12.635309956 +0000 UTC m=+826.742794909" Oct 03 08:54:12 crc kubenswrapper[4899]: I1003 08:54:12.788477 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-hwhx2" Oct 03 08:54:12 crc kubenswrapper[4899]: I1003 08:54:12.815869 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-465sx" podStartSLOduration=9.320845874 podStartE2EDuration="14.815853462s" podCreationTimestamp="2025-10-03 08:53:58 +0000 UTC" firstStartedPulling="2025-10-03 08:54:06.351082541 +0000 UTC m=+820.458567494" lastFinishedPulling="2025-10-03 08:54:11.846090129 +0000 UTC m=+825.953575082" observedRunningTime="2025-10-03 08:54:12.669116111 +0000 UTC m=+826.776601064" watchObservedRunningTime="2025-10-03 08:54:12.815853462 +0000 UTC m=+826.923338415" Oct 03 08:54:12 crc kubenswrapper[4899]: I1003 08:54:12.855193 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-dglwx" Oct 03 08:54:12 crc kubenswrapper[4899]: I1003 08:54:12.897102 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-7zf22" Oct 03 08:54:12 crc kubenswrapper[4899]: I1003 08:54:12.934158 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-hpqjs" Oct 03 08:54:12 crc kubenswrapper[4899]: I1003 08:54:12.942694 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-qxxgx" Oct 03 08:54:13 crc kubenswrapper[4899]: I1003 08:54:13.018759 4899 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-78vk7" Oct 03 08:54:13 crc kubenswrapper[4899]: I1003 08:54:13.035582 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-trrmd" Oct 03 08:54:13 crc kubenswrapper[4899]: I1003 08:54:13.601192 4899 generic.go:334] "Generic (PLEG): container finished" podID="f3d43d09-2fb5-4994-b485-fc405297d47b" containerID="3b9f076fb3afe15d2debdd6cd60405d49e751d70e5978878a831241fc5d123af" exitCode=0 Oct 03 08:54:13 crc kubenswrapper[4899]: I1003 08:54:13.602277 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tjx4z" event={"ID":"f3d43d09-2fb5-4994-b485-fc405297d47b","Type":"ContainerDied","Data":"3b9f076fb3afe15d2debdd6cd60405d49e751d70e5978878a831241fc5d123af"} Oct 03 08:54:13 crc kubenswrapper[4899]: I1003 08:54:13.775106 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg" Oct 03 08:54:17 crc kubenswrapper[4899]: I1003 08:54:17.652125 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-9qcj2" event={"ID":"2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919","Type":"ContainerStarted","Data":"da55007e221c48ba9649fefe04d27b5018a9a498c81543c555268f4017a0ce99"} Oct 03 08:54:17 crc kubenswrapper[4899]: I1003 08:54:17.653561 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-9qcj2" Oct 03 08:54:17 crc kubenswrapper[4899]: I1003 08:54:17.661028 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zvwbw" event={"ID":"6c6cb9a6-eacf-411e-8c19-ac8ee51eced8","Type":"ContainerStarted","Data":"eef0401490ecf1deb51988bf78ebd713a8e5c8b8cef3a7c8f18d946a9c043280"} Oct 03 08:54:17 crc kubenswrapper[4899]: I1003 08:54:17.661623 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zvwbw" Oct 03 08:54:17 crc kubenswrapper[4899]: I1003 08:54:17.669560 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-lxmmw" event={"ID":"7496bf16-1fc1-44ec-b96b-e75e00652634","Type":"ContainerStarted","Data":"728305c1e0d452b98f30feb7b03cce49ad1cedfc56827ac6db82a0066de1baa0"} Oct 03 08:54:17 crc kubenswrapper[4899]: I1003 08:54:17.670421 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-lxmmw" Oct 03 08:54:17 crc kubenswrapper[4899]: I1003 08:54:17.673796 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-lpwr4" event={"ID":"03abda9b-2057-42c8-8161-4104ecb96027","Type":"ContainerStarted","Data":"4856f5cd01f5ef57cb8237694e06e1fa97e4a8c6f68c34f4c97e8650b51f95ae"} Oct 03 08:54:17 crc kubenswrapper[4899]: I1003 08:54:17.674498 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-lpwr4" Oct 03 08:54:17 crc kubenswrapper[4899]: I1003 08:54:17.674579 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-9qcj2" podStartSLOduration=3.57469676 podStartE2EDuration="25.67456945s" podCreationTimestamp="2025-10-03 08:53:52 +0000 UTC" firstStartedPulling="2025-10-03 08:53:54.555509791 +0000 UTC m=+808.662994744" lastFinishedPulling="2025-10-03 08:54:16.655382481 +0000 UTC m=+830.762867434" observedRunningTime="2025-10-03 08:54:17.673440705 +0000 UTC m=+831.780925668" watchObservedRunningTime="2025-10-03 08:54:17.67456945 +0000 UTC m=+831.782054403" Oct 03 08:54:17 crc kubenswrapper[4899]: I1003 08:54:17.680252 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-bnb75" event={"ID":"6008780d-5be3-4fda-8526-594566364ae4","Type":"ContainerStarted","Data":"a3b86b812519d60110f891c1386a0df997da47df4b9e71a2f1f843d5927e4839"} Oct 03 08:54:17 crc kubenswrapper[4899]: I1003 08:54:17.681272 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-bnb75" Oct 03 08:54:17 crc kubenswrapper[4899]: I1003 08:54:17.686603 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-f5mgb" event={"ID":"837b07a9-5832-4d01-b257-ac3fca82b121","Type":"ContainerStarted","Data":"83f4f3fd2903e7b38401e4a6288d34860d95b383d63a552fab8c799584fabcb3"} Oct 03 08:54:17 crc kubenswrapper[4899]: I1003 08:54:17.687455 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-f5mgb" Oct 03 08:54:17 crc kubenswrapper[4899]: I1003 08:54:17.689549 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-7psz6" event={"ID":"2cbb69db-51ad-471c-be3a-57b9422f11cd","Type":"ContainerStarted","Data":"63a42c66e985d9b85799aa30aebc84994921f8a943e922fc7102ac2bf0ccb7f0"} Oct 03 08:54:17 crc kubenswrapper[4899]: I1003 08:54:17.689768 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-7psz6" Oct 03 08:54:17 crc kubenswrapper[4899]: I1003 08:54:17.692151 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tjx4z" event={"ID":"f3d43d09-2fb5-4994-b485-fc405297d47b","Type":"ContainerStarted","Data":"dd6d67320743be235fdaae990dda97103fd800bbea3d71b7ff2e220899ebc05b"} Oct 03 08:54:17 crc kubenswrapper[4899]: I1003 08:54:17.702876 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zvwbw" podStartSLOduration=3.643061395 podStartE2EDuration="25.702842681s" podCreationTimestamp="2025-10-03 08:53:52 +0000 UTC" firstStartedPulling="2025-10-03 08:53:54.596013617 +0000 UTC m=+808.703498570" lastFinishedPulling="2025-10-03 08:54:16.655794903 +0000 UTC m=+830.763279856" observedRunningTime="2025-10-03 08:54:17.699929319 +0000 UTC m=+831.807414272" watchObservedRunningTime="2025-10-03 08:54:17.702842681 +0000 UTC m=+831.810327634" Oct 03 08:54:17 crc kubenswrapper[4899]: I1003 08:54:17.718657 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-lxmmw" podStartSLOduration=3.643987255 podStartE2EDuration="25.718636669s" podCreationTimestamp="2025-10-03 08:53:52 +0000 UTC" firstStartedPulling="2025-10-03 08:53:54.580823159 
+0000 UTC m=+808.688308112" lastFinishedPulling="2025-10-03 08:54:16.655472573 +0000 UTC m=+830.762957526" observedRunningTime="2025-10-03 08:54:17.714970073 +0000 UTC m=+831.822455036" watchObservedRunningTime="2025-10-03 08:54:17.718636669 +0000 UTC m=+831.826121632" Oct 03 08:54:17 crc kubenswrapper[4899]: I1003 08:54:17.734927 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-7psz6" podStartSLOduration=3.645192531 podStartE2EDuration="25.734909371s" podCreationTimestamp="2025-10-03 08:53:52 +0000 UTC" firstStartedPulling="2025-10-03 08:53:54.565750973 +0000 UTC m=+808.673235926" lastFinishedPulling="2025-10-03 08:54:16.655467813 +0000 UTC m=+830.762952766" observedRunningTime="2025-10-03 08:54:17.732940469 +0000 UTC m=+831.840425422" watchObservedRunningTime="2025-10-03 08:54:17.734909371 +0000 UTC m=+831.842394354" Oct 03 08:54:17 crc kubenswrapper[4899]: I1003 08:54:17.760243 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-f5mgb" podStartSLOduration=3.680422681 podStartE2EDuration="25.760218928s" podCreationTimestamp="2025-10-03 08:53:52 +0000 UTC" firstStartedPulling="2025-10-03 08:53:54.56436446 +0000 UTC m=+808.671849413" lastFinishedPulling="2025-10-03 08:54:16.644160707 +0000 UTC m=+830.751645660" observedRunningTime="2025-10-03 08:54:17.754285851 +0000 UTC m=+831.861770804" watchObservedRunningTime="2025-10-03 08:54:17.760218928 +0000 UTC m=+831.867703881" Oct 03 08:54:17 crc kubenswrapper[4899]: I1003 08:54:17.780256 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-bnb75" podStartSLOduration=3.675776845 podStartE2EDuration="25.780237139s" podCreationTimestamp="2025-10-03 08:53:52 +0000 UTC" firstStartedPulling="2025-10-03 08:53:54.551006319 +0000 UTC m=+808.658491272" lastFinishedPulling="2025-10-03 08:54:16.655466613 +0000 UTC m=+830.762951566" observedRunningTime="2025-10-03 08:54:17.773991111 +0000 UTC m=+831.881476084" watchObservedRunningTime="2025-10-03 08:54:17.780237139 +0000 UTC m=+831.887722092" Oct 03 08:54:17 crc kubenswrapper[4899]: I1003 08:54:17.795999 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-lpwr4" podStartSLOduration=3.831817419 podStartE2EDuration="25.795980854s" podCreationTimestamp="2025-10-03 08:53:52 +0000 UTC" firstStartedPulling="2025-10-03 08:53:54.663548564 +0000 UTC m=+808.771033517" lastFinishedPulling="2025-10-03 08:54:16.627711999 +0000 UTC m=+830.735196952" observedRunningTime="2025-10-03 08:54:17.794808518 +0000 UTC m=+831.902293491" watchObservedRunningTime="2025-10-03 08:54:17.795980854 +0000 UTC m=+831.903465807" Oct 03 08:54:17 crc kubenswrapper[4899]: I1003 08:54:17.811155 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tjx4z" podStartSLOduration=3.615253413 podStartE2EDuration="12.811137032s" podCreationTimestamp="2025-10-03 08:54:05 +0000 UTC" firstStartedPulling="2025-10-03 08:54:07.459519382 +0000 UTC m=+821.567004335" lastFinishedPulling="2025-10-03 08:54:16.655403001 +0000 UTC m=+830.762887954" observedRunningTime="2025-10-03 08:54:17.809270703 +0000 UTC m=+831.916755656" watchObservedRunningTime="2025-10-03 08:54:17.811137032 +0000 UTC m=+831.918621985" Oct 03 08:54:19 crc 
kubenswrapper[4899]: I1003 08:54:19.320093 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-465sx" Oct 03 08:54:19 crc kubenswrapper[4899]: I1003 08:54:19.320146 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-465sx" Oct 03 08:54:19 crc kubenswrapper[4899]: I1003 08:54:19.362755 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-465sx" Oct 03 08:54:19 crc kubenswrapper[4899]: I1003 08:54:19.757549 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-465sx" Oct 03 08:54:20 crc kubenswrapper[4899]: I1003 08:54:20.379656 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-465sx"] Oct 03 08:54:21 crc kubenswrapper[4899]: I1003 08:54:21.140170 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rp2xc" Oct 03 08:54:21 crc kubenswrapper[4899]: I1003 08:54:21.140253 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rp2xc" Oct 03 08:54:21 crc kubenswrapper[4899]: I1003 08:54:21.187074 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rp2xc" Oct 03 08:54:21 crc kubenswrapper[4899]: I1003 08:54:21.720571 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gbhrz" Oct 03 08:54:21 crc kubenswrapper[4899]: I1003 08:54:21.721806 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gbhrz" Oct 03 08:54:21 crc kubenswrapper[4899]: I1003 08:54:21.721840 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-465sx" podUID="721b0e3a-b699-43a6-b9f5-43e5e52b1d7a" containerName="registry-server" containerID="cri-o://31b3eeea9157431ad05c2219f921c9b17d14b8a4d82516a68f139c3cd7a97092" gracePeriod=2 Oct 03 08:54:21 crc kubenswrapper[4899]: I1003 08:54:21.764284 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gbhrz" Oct 03 08:54:21 crc kubenswrapper[4899]: I1003 08:54:21.765814 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rp2xc" Oct 03 08:54:22 crc kubenswrapper[4899]: I1003 08:54:22.131324 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-465sx" Oct 03 08:54:22 crc kubenswrapper[4899]: I1003 08:54:22.236020 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rccx7\" (UniqueName: \"kubernetes.io/projected/721b0e3a-b699-43a6-b9f5-43e5e52b1d7a-kube-api-access-rccx7\") pod \"721b0e3a-b699-43a6-b9f5-43e5e52b1d7a\" (UID: \"721b0e3a-b699-43a6-b9f5-43e5e52b1d7a\") " Oct 03 08:54:22 crc kubenswrapper[4899]: I1003 08:54:22.236169 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/721b0e3a-b699-43a6-b9f5-43e5e52b1d7a-utilities\") pod \"721b0e3a-b699-43a6-b9f5-43e5e52b1d7a\" (UID: \"721b0e3a-b699-43a6-b9f5-43e5e52b1d7a\") " Oct 03 08:54:22 crc kubenswrapper[4899]: I1003 08:54:22.236270 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/721b0e3a-b699-43a6-b9f5-43e5e52b1d7a-catalog-content\") pod \"721b0e3a-b699-43a6-b9f5-43e5e52b1d7a\" (UID: \"721b0e3a-b699-43a6-b9f5-43e5e52b1d7a\") " Oct 03 08:54:22 crc kubenswrapper[4899]: I1003 08:54:22.237037 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/721b0e3a-b699-43a6-b9f5-43e5e52b1d7a-utilities" (OuterVolumeSpecName: "utilities") pod "721b0e3a-b699-43a6-b9f5-43e5e52b1d7a" (UID: "721b0e3a-b699-43a6-b9f5-43e5e52b1d7a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:54:22 crc kubenswrapper[4899]: I1003 08:54:22.241859 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/721b0e3a-b699-43a6-b9f5-43e5e52b1d7a-kube-api-access-rccx7" (OuterVolumeSpecName: "kube-api-access-rccx7") pod "721b0e3a-b699-43a6-b9f5-43e5e52b1d7a" (UID: "721b0e3a-b699-43a6-b9f5-43e5e52b1d7a"). InnerVolumeSpecName "kube-api-access-rccx7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:54:22 crc kubenswrapper[4899]: I1003 08:54:22.247474 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/721b0e3a-b699-43a6-b9f5-43e5e52b1d7a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "721b0e3a-b699-43a6-b9f5-43e5e52b1d7a" (UID: "721b0e3a-b699-43a6-b9f5-43e5e52b1d7a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:54:22 crc kubenswrapper[4899]: I1003 08:54:22.337660 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rccx7\" (UniqueName: \"kubernetes.io/projected/721b0e3a-b699-43a6-b9f5-43e5e52b1d7a-kube-api-access-rccx7\") on node \"crc\" DevicePath \"\"" Oct 03 08:54:22 crc kubenswrapper[4899]: I1003 08:54:22.337699 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/721b0e3a-b699-43a6-b9f5-43e5e52b1d7a-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 08:54:22 crc kubenswrapper[4899]: I1003 08:54:22.337713 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/721b0e3a-b699-43a6-b9f5-43e5e52b1d7a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 08:54:22 crc kubenswrapper[4899]: I1003 08:54:22.732319 4899 generic.go:334] "Generic (PLEG): container finished" podID="721b0e3a-b699-43a6-b9f5-43e5e52b1d7a" containerID="31b3eeea9157431ad05c2219f921c9b17d14b8a4d82516a68f139c3cd7a97092" exitCode=0 Oct 03 08:54:22 crc kubenswrapper[4899]: I1003 08:54:22.732381 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-465sx" Oct 03 08:54:22 crc kubenswrapper[4899]: I1003 08:54:22.732381 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-465sx" event={"ID":"721b0e3a-b699-43a6-b9f5-43e5e52b1d7a","Type":"ContainerDied","Data":"31b3eeea9157431ad05c2219f921c9b17d14b8a4d82516a68f139c3cd7a97092"} Oct 03 08:54:22 crc kubenswrapper[4899]: I1003 08:54:22.732923 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-465sx" event={"ID":"721b0e3a-b699-43a6-b9f5-43e5e52b1d7a","Type":"ContainerDied","Data":"3fdbf74abf04e267a3da2725fec772a6703a152cc734cb7d7e7d2480b26e7023"} Oct 03 08:54:22 crc kubenswrapper[4899]: I1003 08:54:22.732981 4899 scope.go:117] "RemoveContainer" containerID="31b3eeea9157431ad05c2219f921c9b17d14b8a4d82516a68f139c3cd7a97092" Oct 03 08:54:22 crc kubenswrapper[4899]: I1003 08:54:22.759207 4899 scope.go:117] "RemoveContainer" containerID="1a93ce15f3289958d00764b7f4c57d5981facd300b0085163de3ddd5cfa46690" Oct 03 08:54:22 crc kubenswrapper[4899]: I1003 08:54:22.759270 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-465sx"] Oct 03 08:54:22 crc kubenswrapper[4899]: I1003 08:54:22.764444 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-465sx"] Oct 03 08:54:22 crc kubenswrapper[4899]: I1003 08:54:22.781753 4899 scope.go:117] "RemoveContainer" containerID="373e0ca145b58299804fbfc46040f60e02903bb90bd441965f8b690328054f90" Oct 03 08:54:22 crc kubenswrapper[4899]: I1003 08:54:22.785287 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gbhrz" Oct 03 08:54:22 crc kubenswrapper[4899]: I1003 08:54:22.813634 4899 scope.go:117] "RemoveContainer" containerID="31b3eeea9157431ad05c2219f921c9b17d14b8a4d82516a68f139c3cd7a97092" Oct 03 08:54:22 crc kubenswrapper[4899]: E1003 08:54:22.814087 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31b3eeea9157431ad05c2219f921c9b17d14b8a4d82516a68f139c3cd7a97092\": container with ID starting with 
31b3eeea9157431ad05c2219f921c9b17d14b8a4d82516a68f139c3cd7a97092 not found: ID does not exist" containerID="31b3eeea9157431ad05c2219f921c9b17d14b8a4d82516a68f139c3cd7a97092" Oct 03 08:54:22 crc kubenswrapper[4899]: I1003 08:54:22.814140 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31b3eeea9157431ad05c2219f921c9b17d14b8a4d82516a68f139c3cd7a97092"} err="failed to get container status \"31b3eeea9157431ad05c2219f921c9b17d14b8a4d82516a68f139c3cd7a97092\": rpc error: code = NotFound desc = could not find container \"31b3eeea9157431ad05c2219f921c9b17d14b8a4d82516a68f139c3cd7a97092\": container with ID starting with 31b3eeea9157431ad05c2219f921c9b17d14b8a4d82516a68f139c3cd7a97092 not found: ID does not exist" Oct 03 08:54:22 crc kubenswrapper[4899]: I1003 08:54:22.814162 4899 scope.go:117] "RemoveContainer" containerID="1a93ce15f3289958d00764b7f4c57d5981facd300b0085163de3ddd5cfa46690" Oct 03 08:54:22 crc kubenswrapper[4899]: E1003 08:54:22.815409 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a93ce15f3289958d00764b7f4c57d5981facd300b0085163de3ddd5cfa46690\": container with ID starting with 1a93ce15f3289958d00764b7f4c57d5981facd300b0085163de3ddd5cfa46690 not found: ID does not exist" containerID="1a93ce15f3289958d00764b7f4c57d5981facd300b0085163de3ddd5cfa46690" Oct 03 08:54:22 crc kubenswrapper[4899]: I1003 08:54:22.815431 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a93ce15f3289958d00764b7f4c57d5981facd300b0085163de3ddd5cfa46690"} err="failed to get container status \"1a93ce15f3289958d00764b7f4c57d5981facd300b0085163de3ddd5cfa46690\": rpc error: code = NotFound desc = could not find container \"1a93ce15f3289958d00764b7f4c57d5981facd300b0085163de3ddd5cfa46690\": container with ID starting with 1a93ce15f3289958d00764b7f4c57d5981facd300b0085163de3ddd5cfa46690 not found: ID does not exist" Oct 03 08:54:22 crc kubenswrapper[4899]: I1003 08:54:22.815444 4899 scope.go:117] "RemoveContainer" containerID="373e0ca145b58299804fbfc46040f60e02903bb90bd441965f8b690328054f90" Oct 03 08:54:22 crc kubenswrapper[4899]: E1003 08:54:22.815690 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"373e0ca145b58299804fbfc46040f60e02903bb90bd441965f8b690328054f90\": container with ID starting with 373e0ca145b58299804fbfc46040f60e02903bb90bd441965f8b690328054f90 not found: ID does not exist" containerID="373e0ca145b58299804fbfc46040f60e02903bb90bd441965f8b690328054f90" Oct 03 08:54:22 crc kubenswrapper[4899]: I1003 08:54:22.815713 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"373e0ca145b58299804fbfc46040f60e02903bb90bd441965f8b690328054f90"} err="failed to get container status \"373e0ca145b58299804fbfc46040f60e02903bb90bd441965f8b690328054f90\": rpc error: code = NotFound desc = could not find container \"373e0ca145b58299804fbfc46040f60e02903bb90bd441965f8b690328054f90\": container with ID starting with 373e0ca145b58299804fbfc46040f60e02903bb90bd441965f8b690328054f90 not found: ID does not exist" Oct 03 08:54:23 crc kubenswrapper[4899]: I1003 08:54:23.091247 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-7psz6" Oct 03 08:54:23 crc kubenswrapper[4899]: I1003 08:54:23.143521 4899 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-9qcj2" Oct 03 08:54:23 crc kubenswrapper[4899]: I1003 08:54:23.150730 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-bnb75" Oct 03 08:54:23 crc kubenswrapper[4899]: I1003 08:54:23.232670 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-f5mgb" Oct 03 08:54:23 crc kubenswrapper[4899]: I1003 08:54:23.236125 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zvwbw" Oct 03 08:54:23 crc kubenswrapper[4899]: I1003 08:54:23.304446 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-lpwr4" Oct 03 08:54:23 crc kubenswrapper[4899]: I1003 08:54:23.338274 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-lxmmw" Oct 03 08:54:23 crc kubenswrapper[4899]: I1003 08:54:23.577356 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rp2xc"] Oct 03 08:54:23 crc kubenswrapper[4899]: I1003 08:54:23.739837 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rp2xc" podUID="2fdc02e4-5ae5-4609-9946-b4bc62fa5687" containerName="registry-server" containerID="cri-o://925c561cc4fa89740e0e830780edf5fe4c56cb33b556208a24bd8794e7cedf9b" gracePeriod=2 Oct 03 08:54:24 crc kubenswrapper[4899]: I1003 08:54:24.175364 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gbhrz"] Oct 03 08:54:24 crc kubenswrapper[4899]: I1003 08:54:24.535667 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="721b0e3a-b699-43a6-b9f5-43e5e52b1d7a" path="/var/lib/kubelet/pods/721b0e3a-b699-43a6-b9f5-43e5e52b1d7a/volumes" Oct 03 08:54:24 crc kubenswrapper[4899]: I1003 08:54:24.748548 4899 generic.go:334] "Generic (PLEG): container finished" podID="2fdc02e4-5ae5-4609-9946-b4bc62fa5687" containerID="925c561cc4fa89740e0e830780edf5fe4c56cb33b556208a24bd8794e7cedf9b" exitCode=0 Oct 03 08:54:24 crc kubenswrapper[4899]: I1003 08:54:24.748642 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rp2xc" event={"ID":"2fdc02e4-5ae5-4609-9946-b4bc62fa5687","Type":"ContainerDied","Data":"925c561cc4fa89740e0e830780edf5fe4c56cb33b556208a24bd8794e7cedf9b"} Oct 03 08:54:24 crc kubenswrapper[4899]: I1003 08:54:24.748814 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-gbhrz" podUID="eaeb11f4-25fe-42cc-8bf4-0d6c93803c16" containerName="registry-server" containerID="cri-o://2b9fb7345e49f8df16445b980a4cf8cd676061d063b9774d2706e7e55d814ba7" gracePeriod=2 Oct 03 08:54:25 crc kubenswrapper[4899]: I1003 08:54:25.562006 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tjx4z" Oct 03 08:54:25 crc kubenswrapper[4899]: I1003 08:54:25.562075 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tjx4z" Oct 03 08:54:25 crc kubenswrapper[4899]: I1003 08:54:25.604888 4899 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tjx4z" Oct 03 08:54:25 crc kubenswrapper[4899]: I1003 08:54:25.757409 4899 generic.go:334] "Generic (PLEG): container finished" podID="eaeb11f4-25fe-42cc-8bf4-0d6c93803c16" containerID="2b9fb7345e49f8df16445b980a4cf8cd676061d063b9774d2706e7e55d814ba7" exitCode=0 Oct 03 08:54:25 crc kubenswrapper[4899]: I1003 08:54:25.757434 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gbhrz" event={"ID":"eaeb11f4-25fe-42cc-8bf4-0d6c93803c16","Type":"ContainerDied","Data":"2b9fb7345e49f8df16445b980a4cf8cd676061d063b9774d2706e7e55d814ba7"} Oct 03 08:54:25 crc kubenswrapper[4899]: I1003 08:54:25.797578 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tjx4z" Oct 03 08:54:26 crc kubenswrapper[4899]: I1003 08:54:26.115746 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rp2xc" Oct 03 08:54:26 crc kubenswrapper[4899]: I1003 08:54:26.199630 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fdc02e4-5ae5-4609-9946-b4bc62fa5687-utilities\") pod \"2fdc02e4-5ae5-4609-9946-b4bc62fa5687\" (UID: \"2fdc02e4-5ae5-4609-9946-b4bc62fa5687\") " Oct 03 08:54:26 crc kubenswrapper[4899]: I1003 08:54:26.199692 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-khpbx\" (UniqueName: \"kubernetes.io/projected/2fdc02e4-5ae5-4609-9946-b4bc62fa5687-kube-api-access-khpbx\") pod \"2fdc02e4-5ae5-4609-9946-b4bc62fa5687\" (UID: \"2fdc02e4-5ae5-4609-9946-b4bc62fa5687\") " Oct 03 08:54:26 crc kubenswrapper[4899]: I1003 08:54:26.199784 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fdc02e4-5ae5-4609-9946-b4bc62fa5687-catalog-content\") pod \"2fdc02e4-5ae5-4609-9946-b4bc62fa5687\" (UID: \"2fdc02e4-5ae5-4609-9946-b4bc62fa5687\") " Oct 03 08:54:26 crc kubenswrapper[4899]: I1003 08:54:26.200375 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2fdc02e4-5ae5-4609-9946-b4bc62fa5687-utilities" (OuterVolumeSpecName: "utilities") pod "2fdc02e4-5ae5-4609-9946-b4bc62fa5687" (UID: "2fdc02e4-5ae5-4609-9946-b4bc62fa5687"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:54:26 crc kubenswrapper[4899]: I1003 08:54:26.206115 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fdc02e4-5ae5-4609-9946-b4bc62fa5687-kube-api-access-khpbx" (OuterVolumeSpecName: "kube-api-access-khpbx") pod "2fdc02e4-5ae5-4609-9946-b4bc62fa5687" (UID: "2fdc02e4-5ae5-4609-9946-b4bc62fa5687"). InnerVolumeSpecName "kube-api-access-khpbx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:54:26 crc kubenswrapper[4899]: I1003 08:54:26.244454 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2fdc02e4-5ae5-4609-9946-b4bc62fa5687-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2fdc02e4-5ae5-4609-9946-b4bc62fa5687" (UID: "2fdc02e4-5ae5-4609-9946-b4bc62fa5687"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:54:26 crc kubenswrapper[4899]: I1003 08:54:26.301569 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fdc02e4-5ae5-4609-9946-b4bc62fa5687-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 08:54:26 crc kubenswrapper[4899]: I1003 08:54:26.301608 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fdc02e4-5ae5-4609-9946-b4bc62fa5687-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 08:54:26 crc kubenswrapper[4899]: I1003 08:54:26.301624 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-khpbx\" (UniqueName: \"kubernetes.io/projected/2fdc02e4-5ae5-4609-9946-b4bc62fa5687-kube-api-access-khpbx\") on node \"crc\" DevicePath \"\"" Oct 03 08:54:26 crc kubenswrapper[4899]: I1003 08:54:26.766963 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rp2xc" event={"ID":"2fdc02e4-5ae5-4609-9946-b4bc62fa5687","Type":"ContainerDied","Data":"2b125d8750b07725fe3397ef3eb581a65f4b0e212dbf88558ce29ac8e56cae00"} Oct 03 08:54:26 crc kubenswrapper[4899]: I1003 08:54:26.767040 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rp2xc" Oct 03 08:54:26 crc kubenswrapper[4899]: I1003 08:54:26.767239 4899 scope.go:117] "RemoveContainer" containerID="925c561cc4fa89740e0e830780edf5fe4c56cb33b556208a24bd8794e7cedf9b" Oct 03 08:54:26 crc kubenswrapper[4899]: I1003 08:54:26.805604 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rp2xc"] Oct 03 08:54:26 crc kubenswrapper[4899]: I1003 08:54:26.807726 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rp2xc"] Oct 03 08:54:26 crc kubenswrapper[4899]: I1003 08:54:26.814503 4899 scope.go:117] "RemoveContainer" containerID="f9885125a6f6f811401e31f990b7875027cd7c2006f42832e20e6b86a99f35f6" Oct 03 08:54:26 crc kubenswrapper[4899]: I1003 08:54:26.839133 4899 scope.go:117] "RemoveContainer" containerID="dce067edaf2417a8cd56ca66e7d60207bdd46ab3d054dafd5d7a401d0ecd1850" Oct 03 08:54:26 crc kubenswrapper[4899]: I1003 08:54:26.975404 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gbhrz" Oct 03 08:54:27 crc kubenswrapper[4899]: I1003 08:54:27.116161 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rl9cv\" (UniqueName: \"kubernetes.io/projected/eaeb11f4-25fe-42cc-8bf4-0d6c93803c16-kube-api-access-rl9cv\") pod \"eaeb11f4-25fe-42cc-8bf4-0d6c93803c16\" (UID: \"eaeb11f4-25fe-42cc-8bf4-0d6c93803c16\") " Oct 03 08:54:27 crc kubenswrapper[4899]: I1003 08:54:27.116338 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eaeb11f4-25fe-42cc-8bf4-0d6c93803c16-catalog-content\") pod \"eaeb11f4-25fe-42cc-8bf4-0d6c93803c16\" (UID: \"eaeb11f4-25fe-42cc-8bf4-0d6c93803c16\") " Oct 03 08:54:27 crc kubenswrapper[4899]: I1003 08:54:27.116397 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eaeb11f4-25fe-42cc-8bf4-0d6c93803c16-utilities\") pod \"eaeb11f4-25fe-42cc-8bf4-0d6c93803c16\" (UID: \"eaeb11f4-25fe-42cc-8bf4-0d6c93803c16\") " Oct 03 08:54:27 crc kubenswrapper[4899]: I1003 08:54:27.117539 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eaeb11f4-25fe-42cc-8bf4-0d6c93803c16-utilities" (OuterVolumeSpecName: "utilities") pod "eaeb11f4-25fe-42cc-8bf4-0d6c93803c16" (UID: "eaeb11f4-25fe-42cc-8bf4-0d6c93803c16"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:54:27 crc kubenswrapper[4899]: I1003 08:54:27.122107 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eaeb11f4-25fe-42cc-8bf4-0d6c93803c16-kube-api-access-rl9cv" (OuterVolumeSpecName: "kube-api-access-rl9cv") pod "eaeb11f4-25fe-42cc-8bf4-0d6c93803c16" (UID: "eaeb11f4-25fe-42cc-8bf4-0d6c93803c16"). InnerVolumeSpecName "kube-api-access-rl9cv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:54:27 crc kubenswrapper[4899]: I1003 08:54:27.165184 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eaeb11f4-25fe-42cc-8bf4-0d6c93803c16-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eaeb11f4-25fe-42cc-8bf4-0d6c93803c16" (UID: "eaeb11f4-25fe-42cc-8bf4-0d6c93803c16"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:54:27 crc kubenswrapper[4899]: I1003 08:54:27.217797 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eaeb11f4-25fe-42cc-8bf4-0d6c93803c16-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 08:54:27 crc kubenswrapper[4899]: I1003 08:54:27.217830 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eaeb11f4-25fe-42cc-8bf4-0d6c93803c16-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 08:54:27 crc kubenswrapper[4899]: I1003 08:54:27.217839 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rl9cv\" (UniqueName: \"kubernetes.io/projected/eaeb11f4-25fe-42cc-8bf4-0d6c93803c16-kube-api-access-rl9cv\") on node \"crc\" DevicePath \"\"" Oct 03 08:54:27 crc kubenswrapper[4899]: I1003 08:54:27.775341 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gbhrz" event={"ID":"eaeb11f4-25fe-42cc-8bf4-0d6c93803c16","Type":"ContainerDied","Data":"ca86fa9458587b528bfd5fbd222c61946e3923f605c8b9ba15971d1feb510ca7"} Oct 03 08:54:27 crc kubenswrapper[4899]: I1003 08:54:27.775371 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gbhrz" Oct 03 08:54:27 crc kubenswrapper[4899]: I1003 08:54:27.775693 4899 scope.go:117] "RemoveContainer" containerID="2b9fb7345e49f8df16445b980a4cf8cd676061d063b9774d2706e7e55d814ba7" Oct 03 08:54:27 crc kubenswrapper[4899]: I1003 08:54:27.793116 4899 scope.go:117] "RemoveContainer" containerID="3ff8bb903c70481714ef070a58c5a04e1ad6d73b93780550e8f45aa7e60473b5" Oct 03 08:54:27 crc kubenswrapper[4899]: I1003 08:54:27.807880 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gbhrz"] Oct 03 08:54:27 crc kubenswrapper[4899]: I1003 08:54:27.811589 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-gbhrz"] Oct 03 08:54:27 crc kubenswrapper[4899]: I1003 08:54:27.822014 4899 scope.go:117] "RemoveContainer" containerID="86eb7c935837bf6427ff83a227122c8824bcf4bf8bd2dfdcad2618566179863a" Oct 03 08:54:28 crc kubenswrapper[4899]: I1003 08:54:28.535600 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fdc02e4-5ae5-4609-9946-b4bc62fa5687" path="/var/lib/kubelet/pods/2fdc02e4-5ae5-4609-9946-b4bc62fa5687/volumes" Oct 03 08:54:28 crc kubenswrapper[4899]: I1003 08:54:28.536506 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eaeb11f4-25fe-42cc-8bf4-0d6c93803c16" path="/var/lib/kubelet/pods/eaeb11f4-25fe-42cc-8bf4-0d6c93803c16/volumes" Oct 03 08:54:28 crc kubenswrapper[4899]: I1003 08:54:28.577718 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tjx4z"] Oct 03 08:54:28 crc kubenswrapper[4899]: I1003 08:54:28.577941 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tjx4z" podUID="f3d43d09-2fb5-4994-b485-fc405297d47b" containerName="registry-server" containerID="cri-o://dd6d67320743be235fdaae990dda97103fd800bbea3d71b7ff2e220899ebc05b" gracePeriod=2 Oct 03 08:54:28 crc kubenswrapper[4899]: I1003 08:54:28.792165 4899 generic.go:334] "Generic (PLEG): container finished" podID="f3d43d09-2fb5-4994-b485-fc405297d47b" containerID="dd6d67320743be235fdaae990dda97103fd800bbea3d71b7ff2e220899ebc05b" 
exitCode=0 Oct 03 08:54:28 crc kubenswrapper[4899]: I1003 08:54:28.792260 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tjx4z" event={"ID":"f3d43d09-2fb5-4994-b485-fc405297d47b","Type":"ContainerDied","Data":"dd6d67320743be235fdaae990dda97103fd800bbea3d71b7ff2e220899ebc05b"} Oct 03 08:54:28 crc kubenswrapper[4899]: I1003 08:54:28.977694 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tjx4z" Oct 03 08:54:29 crc kubenswrapper[4899]: I1003 08:54:29.038405 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3d43d09-2fb5-4994-b485-fc405297d47b-utilities\") pod \"f3d43d09-2fb5-4994-b485-fc405297d47b\" (UID: \"f3d43d09-2fb5-4994-b485-fc405297d47b\") " Oct 03 08:54:29 crc kubenswrapper[4899]: I1003 08:54:29.038486 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3d43d09-2fb5-4994-b485-fc405297d47b-catalog-content\") pod \"f3d43d09-2fb5-4994-b485-fc405297d47b\" (UID: \"f3d43d09-2fb5-4994-b485-fc405297d47b\") " Oct 03 08:54:29 crc kubenswrapper[4899]: I1003 08:54:29.038573 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b5h5v\" (UniqueName: \"kubernetes.io/projected/f3d43d09-2fb5-4994-b485-fc405297d47b-kube-api-access-b5h5v\") pod \"f3d43d09-2fb5-4994-b485-fc405297d47b\" (UID: \"f3d43d09-2fb5-4994-b485-fc405297d47b\") " Oct 03 08:54:29 crc kubenswrapper[4899]: I1003 08:54:29.039510 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3d43d09-2fb5-4994-b485-fc405297d47b-utilities" (OuterVolumeSpecName: "utilities") pod "f3d43d09-2fb5-4994-b485-fc405297d47b" (UID: "f3d43d09-2fb5-4994-b485-fc405297d47b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:54:29 crc kubenswrapper[4899]: I1003 08:54:29.043662 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3d43d09-2fb5-4994-b485-fc405297d47b-kube-api-access-b5h5v" (OuterVolumeSpecName: "kube-api-access-b5h5v") pod "f3d43d09-2fb5-4994-b485-fc405297d47b" (UID: "f3d43d09-2fb5-4994-b485-fc405297d47b"). InnerVolumeSpecName "kube-api-access-b5h5v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:54:29 crc kubenswrapper[4899]: I1003 08:54:29.117604 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3d43d09-2fb5-4994-b485-fc405297d47b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f3d43d09-2fb5-4994-b485-fc405297d47b" (UID: "f3d43d09-2fb5-4994-b485-fc405297d47b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:54:29 crc kubenswrapper[4899]: I1003 08:54:29.140167 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3d43d09-2fb5-4994-b485-fc405297d47b-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 08:54:29 crc kubenswrapper[4899]: I1003 08:54:29.140195 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3d43d09-2fb5-4994-b485-fc405297d47b-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 08:54:29 crc kubenswrapper[4899]: I1003 08:54:29.140231 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b5h5v\" (UniqueName: \"kubernetes.io/projected/f3d43d09-2fb5-4994-b485-fc405297d47b-kube-api-access-b5h5v\") on node \"crc\" DevicePath \"\"" Oct 03 08:54:29 crc kubenswrapper[4899]: I1003 08:54:29.806637 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tjx4z" event={"ID":"f3d43d09-2fb5-4994-b485-fc405297d47b","Type":"ContainerDied","Data":"9166497e92b0503d789b1c57760ed99a903ffde7572e201ac9b358461038b415"} Oct 03 08:54:29 crc kubenswrapper[4899]: I1003 08:54:29.806714 4899 scope.go:117] "RemoveContainer" containerID="dd6d67320743be235fdaae990dda97103fd800bbea3d71b7ff2e220899ebc05b" Oct 03 08:54:29 crc kubenswrapper[4899]: I1003 08:54:29.806794 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tjx4z" Oct 03 08:54:29 crc kubenswrapper[4899]: I1003 08:54:29.829067 4899 scope.go:117] "RemoveContainer" containerID="3b9f076fb3afe15d2debdd6cd60405d49e751d70e5978878a831241fc5d123af" Oct 03 08:54:29 crc kubenswrapper[4899]: I1003 08:54:29.851375 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tjx4z"] Oct 03 08:54:29 crc kubenswrapper[4899]: I1003 08:54:29.853572 4899 scope.go:117] "RemoveContainer" containerID="70bac5f66c6062ce580e768999364aeb07d3fa36d1d095cffc5cdc3228527b7f" Oct 03 08:54:29 crc kubenswrapper[4899]: I1003 08:54:29.859077 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tjx4z"] Oct 03 08:54:30 crc kubenswrapper[4899]: I1003 08:54:30.543433 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3d43d09-2fb5-4994-b485-fc405297d47b" path="/var/lib/kubelet/pods/f3d43d09-2fb5-4994-b485-fc405297d47b/volumes" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.831634 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-6rx7j"] Oct 03 08:54:40 crc kubenswrapper[4899]: E1003 08:54:40.832905 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3d43d09-2fb5-4994-b485-fc405297d47b" containerName="registry-server" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.832921 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3d43d09-2fb5-4994-b485-fc405297d47b" containerName="registry-server" Oct 03 08:54:40 crc kubenswrapper[4899]: E1003 08:54:40.832943 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eaeb11f4-25fe-42cc-8bf4-0d6c93803c16" containerName="registry-server" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.832949 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="eaeb11f4-25fe-42cc-8bf4-0d6c93803c16" containerName="registry-server" Oct 03 08:54:40 crc kubenswrapper[4899]: E1003 08:54:40.832969 4899 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="721b0e3a-b699-43a6-b9f5-43e5e52b1d7a" containerName="extract-utilities" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.832975 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="721b0e3a-b699-43a6-b9f5-43e5e52b1d7a" containerName="extract-utilities" Oct 03 08:54:40 crc kubenswrapper[4899]: E1003 08:54:40.832982 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eaeb11f4-25fe-42cc-8bf4-0d6c93803c16" containerName="extract-content" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.833956 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="eaeb11f4-25fe-42cc-8bf4-0d6c93803c16" containerName="extract-content" Oct 03 08:54:40 crc kubenswrapper[4899]: E1003 08:54:40.834012 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fdc02e4-5ae5-4609-9946-b4bc62fa5687" containerName="registry-server" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.834019 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fdc02e4-5ae5-4609-9946-b4bc62fa5687" containerName="registry-server" Oct 03 08:54:40 crc kubenswrapper[4899]: E1003 08:54:40.834036 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fdc02e4-5ae5-4609-9946-b4bc62fa5687" containerName="extract-utilities" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.834045 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fdc02e4-5ae5-4609-9946-b4bc62fa5687" containerName="extract-utilities" Oct 03 08:54:40 crc kubenswrapper[4899]: E1003 08:54:40.834091 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fdc02e4-5ae5-4609-9946-b4bc62fa5687" containerName="extract-content" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.834098 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fdc02e4-5ae5-4609-9946-b4bc62fa5687" containerName="extract-content" Oct 03 08:54:40 crc kubenswrapper[4899]: E1003 08:54:40.834109 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3d43d09-2fb5-4994-b485-fc405297d47b" containerName="extract-content" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.834134 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3d43d09-2fb5-4994-b485-fc405297d47b" containerName="extract-content" Oct 03 08:54:40 crc kubenswrapper[4899]: E1003 08:54:40.834167 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="721b0e3a-b699-43a6-b9f5-43e5e52b1d7a" containerName="registry-server" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.834174 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="721b0e3a-b699-43a6-b9f5-43e5e52b1d7a" containerName="registry-server" Oct 03 08:54:40 crc kubenswrapper[4899]: E1003 08:54:40.834183 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="721b0e3a-b699-43a6-b9f5-43e5e52b1d7a" containerName="extract-content" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.834189 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="721b0e3a-b699-43a6-b9f5-43e5e52b1d7a" containerName="extract-content" Oct 03 08:54:40 crc kubenswrapper[4899]: E1003 08:54:40.834204 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eaeb11f4-25fe-42cc-8bf4-0d6c93803c16" containerName="extract-utilities" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.834211 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="eaeb11f4-25fe-42cc-8bf4-0d6c93803c16" containerName="extract-utilities" Oct 03 08:54:40 crc kubenswrapper[4899]: E1003 
08:54:40.834230 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3d43d09-2fb5-4994-b485-fc405297d47b" containerName="extract-utilities" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.834237 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3d43d09-2fb5-4994-b485-fc405297d47b" containerName="extract-utilities" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.834507 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="721b0e3a-b699-43a6-b9f5-43e5e52b1d7a" containerName="registry-server" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.834536 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fdc02e4-5ae5-4609-9946-b4bc62fa5687" containerName="registry-server" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.834549 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="eaeb11f4-25fe-42cc-8bf4-0d6c93803c16" containerName="registry-server" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.834558 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3d43d09-2fb5-4994-b485-fc405297d47b" containerName="registry-server" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.835262 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-6rx7j" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.845401 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.845451 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.846122 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-gp48h" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.846227 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.852134 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-6rx7j"] Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.892770 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-kd4pl"] Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.893919 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-kd4pl" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.894532 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-497xw\" (UniqueName: \"kubernetes.io/projected/11754825-8dda-4c30-81c7-bf8bc000600b-kube-api-access-497xw\") pod \"dnsmasq-dns-675f4bcbfc-6rx7j\" (UID: \"11754825-8dda-4c30-81c7-bf8bc000600b\") " pod="openstack/dnsmasq-dns-675f4bcbfc-6rx7j" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.894666 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11754825-8dda-4c30-81c7-bf8bc000600b-config\") pod \"dnsmasq-dns-675f4bcbfc-6rx7j\" (UID: \"11754825-8dda-4c30-81c7-bf8bc000600b\") " pod="openstack/dnsmasq-dns-675f4bcbfc-6rx7j" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.897294 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.916776 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-kd4pl"] Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.995942 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-497xw\" (UniqueName: \"kubernetes.io/projected/11754825-8dda-4c30-81c7-bf8bc000600b-kube-api-access-497xw\") pod \"dnsmasq-dns-675f4bcbfc-6rx7j\" (UID: \"11754825-8dda-4c30-81c7-bf8bc000600b\") " pod="openstack/dnsmasq-dns-675f4bcbfc-6rx7j" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.996043 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjphb\" (UniqueName: \"kubernetes.io/projected/cc3b51d4-8ad5-4106-9c0e-68fc133d1818-kube-api-access-rjphb\") pod \"dnsmasq-dns-78dd6ddcc-kd4pl\" (UID: \"cc3b51d4-8ad5-4106-9c0e-68fc133d1818\") " pod="openstack/dnsmasq-dns-78dd6ddcc-kd4pl" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.996081 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11754825-8dda-4c30-81c7-bf8bc000600b-config\") pod \"dnsmasq-dns-675f4bcbfc-6rx7j\" (UID: \"11754825-8dda-4c30-81c7-bf8bc000600b\") " pod="openstack/dnsmasq-dns-675f4bcbfc-6rx7j" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.996097 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc3b51d4-8ad5-4106-9c0e-68fc133d1818-config\") pod \"dnsmasq-dns-78dd6ddcc-kd4pl\" (UID: \"cc3b51d4-8ad5-4106-9c0e-68fc133d1818\") " pod="openstack/dnsmasq-dns-78dd6ddcc-kd4pl" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.996118 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cc3b51d4-8ad5-4106-9c0e-68fc133d1818-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-kd4pl\" (UID: \"cc3b51d4-8ad5-4106-9c0e-68fc133d1818\") " pod="openstack/dnsmasq-dns-78dd6ddcc-kd4pl" Oct 03 08:54:40 crc kubenswrapper[4899]: I1003 08:54:40.996958 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11754825-8dda-4c30-81c7-bf8bc000600b-config\") pod \"dnsmasq-dns-675f4bcbfc-6rx7j\" (UID: \"11754825-8dda-4c30-81c7-bf8bc000600b\") " pod="openstack/dnsmasq-dns-675f4bcbfc-6rx7j" Oct 03 
08:54:41 crc kubenswrapper[4899]: I1003 08:54:41.016698 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-497xw\" (UniqueName: \"kubernetes.io/projected/11754825-8dda-4c30-81c7-bf8bc000600b-kube-api-access-497xw\") pod \"dnsmasq-dns-675f4bcbfc-6rx7j\" (UID: \"11754825-8dda-4c30-81c7-bf8bc000600b\") " pod="openstack/dnsmasq-dns-675f4bcbfc-6rx7j" Oct 03 08:54:41 crc kubenswrapper[4899]: I1003 08:54:41.097391 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjphb\" (UniqueName: \"kubernetes.io/projected/cc3b51d4-8ad5-4106-9c0e-68fc133d1818-kube-api-access-rjphb\") pod \"dnsmasq-dns-78dd6ddcc-kd4pl\" (UID: \"cc3b51d4-8ad5-4106-9c0e-68fc133d1818\") " pod="openstack/dnsmasq-dns-78dd6ddcc-kd4pl" Oct 03 08:54:41 crc kubenswrapper[4899]: I1003 08:54:41.097463 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc3b51d4-8ad5-4106-9c0e-68fc133d1818-config\") pod \"dnsmasq-dns-78dd6ddcc-kd4pl\" (UID: \"cc3b51d4-8ad5-4106-9c0e-68fc133d1818\") " pod="openstack/dnsmasq-dns-78dd6ddcc-kd4pl" Oct 03 08:54:41 crc kubenswrapper[4899]: I1003 08:54:41.097492 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cc3b51d4-8ad5-4106-9c0e-68fc133d1818-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-kd4pl\" (UID: \"cc3b51d4-8ad5-4106-9c0e-68fc133d1818\") " pod="openstack/dnsmasq-dns-78dd6ddcc-kd4pl" Oct 03 08:54:41 crc kubenswrapper[4899]: I1003 08:54:41.098348 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cc3b51d4-8ad5-4106-9c0e-68fc133d1818-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-kd4pl\" (UID: \"cc3b51d4-8ad5-4106-9c0e-68fc133d1818\") " pod="openstack/dnsmasq-dns-78dd6ddcc-kd4pl" Oct 03 08:54:41 crc kubenswrapper[4899]: I1003 08:54:41.098562 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc3b51d4-8ad5-4106-9c0e-68fc133d1818-config\") pod \"dnsmasq-dns-78dd6ddcc-kd4pl\" (UID: \"cc3b51d4-8ad5-4106-9c0e-68fc133d1818\") " pod="openstack/dnsmasq-dns-78dd6ddcc-kd4pl" Oct 03 08:54:41 crc kubenswrapper[4899]: I1003 08:54:41.116627 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjphb\" (UniqueName: \"kubernetes.io/projected/cc3b51d4-8ad5-4106-9c0e-68fc133d1818-kube-api-access-rjphb\") pod \"dnsmasq-dns-78dd6ddcc-kd4pl\" (UID: \"cc3b51d4-8ad5-4106-9c0e-68fc133d1818\") " pod="openstack/dnsmasq-dns-78dd6ddcc-kd4pl" Oct 03 08:54:41 crc kubenswrapper[4899]: I1003 08:54:41.165489 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-6rx7j" Oct 03 08:54:41 crc kubenswrapper[4899]: I1003 08:54:41.210408 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-kd4pl" Oct 03 08:54:41 crc kubenswrapper[4899]: I1003 08:54:41.532122 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-kd4pl"] Oct 03 08:54:41 crc kubenswrapper[4899]: W1003 08:54:41.534810 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcc3b51d4_8ad5_4106_9c0e_68fc133d1818.slice/crio-a5d5cc7c02f6f2b7feae63cf4013d998d62161546b5666088963cb2e5931d7c3 WatchSource:0}: Error finding container a5d5cc7c02f6f2b7feae63cf4013d998d62161546b5666088963cb2e5931d7c3: Status 404 returned error can't find the container with id a5d5cc7c02f6f2b7feae63cf4013d998d62161546b5666088963cb2e5931d7c3 Oct 03 08:54:41 crc kubenswrapper[4899]: I1003 08:54:41.594237 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-6rx7j"] Oct 03 08:54:41 crc kubenswrapper[4899]: W1003 08:54:41.599316 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod11754825_8dda_4c30_81c7_bf8bc000600b.slice/crio-465869c501f3f34a593081a18294636badbbf9683efcce1c1bebbd1fcd60e783 WatchSource:0}: Error finding container 465869c501f3f34a593081a18294636badbbf9683efcce1c1bebbd1fcd60e783: Status 404 returned error can't find the container with id 465869c501f3f34a593081a18294636badbbf9683efcce1c1bebbd1fcd60e783 Oct 03 08:54:41 crc kubenswrapper[4899]: I1003 08:54:41.878766 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-kd4pl" event={"ID":"cc3b51d4-8ad5-4106-9c0e-68fc133d1818","Type":"ContainerStarted","Data":"a5d5cc7c02f6f2b7feae63cf4013d998d62161546b5666088963cb2e5931d7c3"} Oct 03 08:54:41 crc kubenswrapper[4899]: I1003 08:54:41.879769 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-6rx7j" event={"ID":"11754825-8dda-4c30-81c7-bf8bc000600b","Type":"ContainerStarted","Data":"465869c501f3f34a593081a18294636badbbf9683efcce1c1bebbd1fcd60e783"} Oct 03 08:54:43 crc kubenswrapper[4899]: I1003 08:54:43.301217 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-6rx7j"] Oct 03 08:54:43 crc kubenswrapper[4899]: I1003 08:54:43.326709 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-jgjl2"] Oct 03 08:54:43 crc kubenswrapper[4899]: I1003 08:54:43.332127 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-jgjl2" Oct 03 08:54:43 crc kubenswrapper[4899]: I1003 08:54:43.373401 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-jgjl2"] Oct 03 08:54:43 crc kubenswrapper[4899]: I1003 08:54:43.449503 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f586d\" (UniqueName: \"kubernetes.io/projected/c605056d-832f-415a-abb2-a62b9f72a17f-kube-api-access-f586d\") pod \"dnsmasq-dns-666b6646f7-jgjl2\" (UID: \"c605056d-832f-415a-abb2-a62b9f72a17f\") " pod="openstack/dnsmasq-dns-666b6646f7-jgjl2" Oct 03 08:54:43 crc kubenswrapper[4899]: I1003 08:54:43.449557 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c605056d-832f-415a-abb2-a62b9f72a17f-config\") pod \"dnsmasq-dns-666b6646f7-jgjl2\" (UID: \"c605056d-832f-415a-abb2-a62b9f72a17f\") " pod="openstack/dnsmasq-dns-666b6646f7-jgjl2" Oct 03 08:54:43 crc kubenswrapper[4899]: I1003 08:54:43.449623 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c605056d-832f-415a-abb2-a62b9f72a17f-dns-svc\") pod \"dnsmasq-dns-666b6646f7-jgjl2\" (UID: \"c605056d-832f-415a-abb2-a62b9f72a17f\") " pod="openstack/dnsmasq-dns-666b6646f7-jgjl2" Oct 03 08:54:43 crc kubenswrapper[4899]: I1003 08:54:43.554158 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c605056d-832f-415a-abb2-a62b9f72a17f-dns-svc\") pod \"dnsmasq-dns-666b6646f7-jgjl2\" (UID: \"c605056d-832f-415a-abb2-a62b9f72a17f\") " pod="openstack/dnsmasq-dns-666b6646f7-jgjl2" Oct 03 08:54:43 crc kubenswrapper[4899]: I1003 08:54:43.554413 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f586d\" (UniqueName: \"kubernetes.io/projected/c605056d-832f-415a-abb2-a62b9f72a17f-kube-api-access-f586d\") pod \"dnsmasq-dns-666b6646f7-jgjl2\" (UID: \"c605056d-832f-415a-abb2-a62b9f72a17f\") " pod="openstack/dnsmasq-dns-666b6646f7-jgjl2" Oct 03 08:54:43 crc kubenswrapper[4899]: I1003 08:54:43.554605 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c605056d-832f-415a-abb2-a62b9f72a17f-config\") pod \"dnsmasq-dns-666b6646f7-jgjl2\" (UID: \"c605056d-832f-415a-abb2-a62b9f72a17f\") " pod="openstack/dnsmasq-dns-666b6646f7-jgjl2" Oct 03 08:54:43 crc kubenswrapper[4899]: I1003 08:54:43.555224 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c605056d-832f-415a-abb2-a62b9f72a17f-dns-svc\") pod \"dnsmasq-dns-666b6646f7-jgjl2\" (UID: \"c605056d-832f-415a-abb2-a62b9f72a17f\") " pod="openstack/dnsmasq-dns-666b6646f7-jgjl2" Oct 03 08:54:43 crc kubenswrapper[4899]: I1003 08:54:43.558687 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c605056d-832f-415a-abb2-a62b9f72a17f-config\") pod \"dnsmasq-dns-666b6646f7-jgjl2\" (UID: \"c605056d-832f-415a-abb2-a62b9f72a17f\") " pod="openstack/dnsmasq-dns-666b6646f7-jgjl2" Oct 03 08:54:43 crc kubenswrapper[4899]: I1003 08:54:43.601149 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f586d\" (UniqueName: 
\"kubernetes.io/projected/c605056d-832f-415a-abb2-a62b9f72a17f-kube-api-access-f586d\") pod \"dnsmasq-dns-666b6646f7-jgjl2\" (UID: \"c605056d-832f-415a-abb2-a62b9f72a17f\") " pod="openstack/dnsmasq-dns-666b6646f7-jgjl2" Oct 03 08:54:43 crc kubenswrapper[4899]: I1003 08:54:43.618251 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-kd4pl"] Oct 03 08:54:43 crc kubenswrapper[4899]: I1003 08:54:43.642116 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-p2bpp"] Oct 03 08:54:43 crc kubenswrapper[4899]: I1003 08:54:43.643197 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-p2bpp" Oct 03 08:54:43 crc kubenswrapper[4899]: I1003 08:54:43.654461 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-p2bpp"] Oct 03 08:54:43 crc kubenswrapper[4899]: I1003 08:54:43.661664 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-jgjl2" Oct 03 08:54:43 crc kubenswrapper[4899]: I1003 08:54:43.761606 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e21bf72-5fd2-409e-9379-6ff3432aaf15-config\") pod \"dnsmasq-dns-57d769cc4f-p2bpp\" (UID: \"8e21bf72-5fd2-409e-9379-6ff3432aaf15\") " pod="openstack/dnsmasq-dns-57d769cc4f-p2bpp" Oct 03 08:54:43 crc kubenswrapper[4899]: I1003 08:54:43.761696 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84gmk\" (UniqueName: \"kubernetes.io/projected/8e21bf72-5fd2-409e-9379-6ff3432aaf15-kube-api-access-84gmk\") pod \"dnsmasq-dns-57d769cc4f-p2bpp\" (UID: \"8e21bf72-5fd2-409e-9379-6ff3432aaf15\") " pod="openstack/dnsmasq-dns-57d769cc4f-p2bpp" Oct 03 08:54:43 crc kubenswrapper[4899]: I1003 08:54:43.761751 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8e21bf72-5fd2-409e-9379-6ff3432aaf15-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-p2bpp\" (UID: \"8e21bf72-5fd2-409e-9379-6ff3432aaf15\") " pod="openstack/dnsmasq-dns-57d769cc4f-p2bpp" Oct 03 08:54:43 crc kubenswrapper[4899]: I1003 08:54:43.863367 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e21bf72-5fd2-409e-9379-6ff3432aaf15-config\") pod \"dnsmasq-dns-57d769cc4f-p2bpp\" (UID: \"8e21bf72-5fd2-409e-9379-6ff3432aaf15\") " pod="openstack/dnsmasq-dns-57d769cc4f-p2bpp" Oct 03 08:54:43 crc kubenswrapper[4899]: I1003 08:54:43.863457 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84gmk\" (UniqueName: \"kubernetes.io/projected/8e21bf72-5fd2-409e-9379-6ff3432aaf15-kube-api-access-84gmk\") pod \"dnsmasq-dns-57d769cc4f-p2bpp\" (UID: \"8e21bf72-5fd2-409e-9379-6ff3432aaf15\") " pod="openstack/dnsmasq-dns-57d769cc4f-p2bpp" Oct 03 08:54:43 crc kubenswrapper[4899]: I1003 08:54:43.863557 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8e21bf72-5fd2-409e-9379-6ff3432aaf15-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-p2bpp\" (UID: \"8e21bf72-5fd2-409e-9379-6ff3432aaf15\") " pod="openstack/dnsmasq-dns-57d769cc4f-p2bpp" Oct 03 08:54:43 crc kubenswrapper[4899]: I1003 08:54:43.865565 4899 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8e21bf72-5fd2-409e-9379-6ff3432aaf15-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-p2bpp\" (UID: \"8e21bf72-5fd2-409e-9379-6ff3432aaf15\") " pod="openstack/dnsmasq-dns-57d769cc4f-p2bpp" Oct 03 08:54:43 crc kubenswrapper[4899]: I1003 08:54:43.865678 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e21bf72-5fd2-409e-9379-6ff3432aaf15-config\") pod \"dnsmasq-dns-57d769cc4f-p2bpp\" (UID: \"8e21bf72-5fd2-409e-9379-6ff3432aaf15\") " pod="openstack/dnsmasq-dns-57d769cc4f-p2bpp" Oct 03 08:54:43 crc kubenswrapper[4899]: I1003 08:54:43.889712 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84gmk\" (UniqueName: \"kubernetes.io/projected/8e21bf72-5fd2-409e-9379-6ff3432aaf15-kube-api-access-84gmk\") pod \"dnsmasq-dns-57d769cc4f-p2bpp\" (UID: \"8e21bf72-5fd2-409e-9379-6ff3432aaf15\") " pod="openstack/dnsmasq-dns-57d769cc4f-p2bpp" Oct 03 08:54:43 crc kubenswrapper[4899]: I1003 08:54:43.971870 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-p2bpp" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.168532 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-jgjl2"] Oct 03 08:54:44 crc kubenswrapper[4899]: W1003 08:54:44.214117 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc605056d_832f_415a_abb2_a62b9f72a17f.slice/crio-ba0f60def4b14125697731aeace9c1232e84ba9f131e282aad5d7ac701f8fa79 WatchSource:0}: Error finding container ba0f60def4b14125697731aeace9c1232e84ba9f131e282aad5d7ac701f8fa79: Status 404 returned error can't find the container with id ba0f60def4b14125697731aeace9c1232e84ba9f131e282aad5d7ac701f8fa79 Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.292376 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-p2bpp"] Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.467818 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.469108 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.471001 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.471102 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.471089 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.471080 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.471456 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-6lr97" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.472321 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.472943 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.487957 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.576750 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.576850 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.576880 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.576917 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-server-conf\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.576976 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-449ld\" (UniqueName: \"kubernetes.io/projected/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-kube-api-access-449ld\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.577007 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-config-data\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.577041 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-pod-info\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.577071 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.577100 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.577140 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.577166 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.678352 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.678392 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.678416 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.678449 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 
03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.678469 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.678486 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-server-conf\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.678529 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-449ld\" (UniqueName: \"kubernetes.io/projected/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-kube-api-access-449ld\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.678554 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-config-data\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.678579 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-pod-info\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.678599 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.678617 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.678794 4899 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.679084 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.679428 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.679606 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.679702 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-config-data\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.680511 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-server-conf\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.684366 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.684583 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.686367 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.691814 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-pod-info\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.698560 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-449ld\" (UniqueName: \"kubernetes.io/projected/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-kube-api-access-449ld\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.707550 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.758419 4899 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.761132 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.763294 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.763497 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.765729 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.765958 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-54tdh" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.766058 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.766410 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.766859 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.768597 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.803472 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.881881 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cff2733b-858c-4578-abcb-a0c503b556d3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.882206 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cff2733b-858c-4578-abcb-a0c503b556d3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.882235 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jx7bs\" (UniqueName: \"kubernetes.io/projected/cff2733b-858c-4578-abcb-a0c503b556d3-kube-api-access-jx7bs\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.882273 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cff2733b-858c-4578-abcb-a0c503b556d3-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.882295 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cff2733b-858c-4578-abcb-a0c503b556d3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.882337 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cff2733b-858c-4578-abcb-a0c503b556d3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.882360 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.882878 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cff2733b-858c-4578-abcb-a0c503b556d3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.882979 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cff2733b-858c-4578-abcb-a0c503b556d3-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.883082 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cff2733b-858c-4578-abcb-a0c503b556d3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.883160 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cff2733b-858c-4578-abcb-a0c503b556d3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.917880 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-p2bpp" event={"ID":"8e21bf72-5fd2-409e-9379-6ff3432aaf15","Type":"ContainerStarted","Data":"b333f8d28a133c9d8c66682b89c37fc290dd3c1734d7fa89249d79e543edad54"} Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.919854 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-jgjl2" event={"ID":"c605056d-832f-415a-abb2-a62b9f72a17f","Type":"ContainerStarted","Data":"ba0f60def4b14125697731aeace9c1232e84ba9f131e282aad5d7ac701f8fa79"} Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.985182 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cff2733b-858c-4578-abcb-a0c503b556d3-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " 
pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.985280 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cff2733b-858c-4578-abcb-a0c503b556d3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.985323 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cff2733b-858c-4578-abcb-a0c503b556d3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.985343 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.985364 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cff2733b-858c-4578-abcb-a0c503b556d3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.985426 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cff2733b-858c-4578-abcb-a0c503b556d3-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.985471 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cff2733b-858c-4578-abcb-a0c503b556d3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.985499 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cff2733b-858c-4578-abcb-a0c503b556d3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.985575 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cff2733b-858c-4578-abcb-a0c503b556d3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.985593 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cff2733b-858c-4578-abcb-a0c503b556d3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.985611 4899 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-jx7bs\" (UniqueName: \"kubernetes.io/projected/cff2733b-858c-4578-abcb-a0c503b556d3-kube-api-access-jx7bs\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.986458 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cff2733b-858c-4578-abcb-a0c503b556d3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.986559 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cff2733b-858c-4578-abcb-a0c503b556d3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.986583 4899 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.986661 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cff2733b-858c-4578-abcb-a0c503b556d3-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.986716 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cff2733b-858c-4578-abcb-a0c503b556d3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.988493 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cff2733b-858c-4578-abcb-a0c503b556d3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.989999 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cff2733b-858c-4578-abcb-a0c503b556d3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.990934 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cff2733b-858c-4578-abcb-a0c503b556d3-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.993538 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cff2733b-858c-4578-abcb-a0c503b556d3-pod-info\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:44 crc kubenswrapper[4899]: I1003 08:54:44.993511 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cff2733b-858c-4578-abcb-a0c503b556d3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:45 crc kubenswrapper[4899]: I1003 08:54:45.014063 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jx7bs\" (UniqueName: \"kubernetes.io/projected/cff2733b-858c-4578-abcb-a0c503b556d3-kube-api-access-jx7bs\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:45 crc kubenswrapper[4899]: I1003 08:54:45.018197 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:45 crc kubenswrapper[4899]: I1003 08:54:45.089607 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:54:45 crc kubenswrapper[4899]: I1003 08:54:45.259074 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 08:54:45 crc kubenswrapper[4899]: I1003 08:54:45.555351 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.446780 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.451648 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.456832 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.461492 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-9xpzs" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.469134 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.469437 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.469652 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.471384 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.473149 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.480587 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.482436 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.490245 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.490638 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.491013 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-twqcl" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.491876 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.495495 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.528531 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ec49b55-9814-4053-a0dd-eda5b7f7995a-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"9ec49b55-9814-4053-a0dd-eda5b7f7995a\") " pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.528706 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b466ebf1-ec52-4c92-8ea9-f0f329c6ab93-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93\") " pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.528765 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ec49b55-9814-4053-a0dd-eda5b7f7995a-operator-scripts\") pod \"openstack-galera-0\" (UID: \"9ec49b55-9814-4053-a0dd-eda5b7f7995a\") " pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.528819 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b466ebf1-ec52-4c92-8ea9-f0f329c6ab93-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93\") " pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.529029 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9ec49b55-9814-4053-a0dd-eda5b7f7995a-config-data-generated\") pod \"openstack-galera-0\" (UID: \"9ec49b55-9814-4053-a0dd-eda5b7f7995a\") " pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.529120 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/b466ebf1-ec52-4c92-8ea9-f0f329c6ab93-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93\") " pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.529190 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/b466ebf1-ec52-4c92-8ea9-f0f329c6ab93-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93\") " pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.529223 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/9ec49b55-9814-4053-a0dd-eda5b7f7995a-secrets\") pod \"openstack-galera-0\" (UID: \"9ec49b55-9814-4053-a0dd-eda5b7f7995a\") " pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.529263 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9ec49b55-9814-4053-a0dd-eda5b7f7995a-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"9ec49b55-9814-4053-a0dd-eda5b7f7995a\") " pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.529296 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b466ebf1-ec52-4c92-8ea9-f0f329c6ab93-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93\") " pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.529348 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-cell1-galera-0\" (UID: \"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93\") " pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.529416 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxlc4\" (UniqueName: \"kubernetes.io/projected/b466ebf1-ec52-4c92-8ea9-f0f329c6ab93-kube-api-access-fxlc4\") pod \"openstack-cell1-galera-0\" (UID: \"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93\") " pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.529472 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b466ebf1-ec52-4c92-8ea9-f0f329c6ab93-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93\") " pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.529514 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"9ec49b55-9814-4053-a0dd-eda5b7f7995a\") " pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.529544 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctgnl\" (UniqueName: \"kubernetes.io/projected/9ec49b55-9814-4053-a0dd-eda5b7f7995a-kube-api-access-ctgnl\") pod \"openstack-galera-0\" (UID: \"9ec49b55-9814-4053-a0dd-eda5b7f7995a\") " pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.529577 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/9ec49b55-9814-4053-a0dd-eda5b7f7995a-kolla-config\") pod \"openstack-galera-0\" (UID: \"9ec49b55-9814-4053-a0dd-eda5b7f7995a\") " pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.529619 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b466ebf1-ec52-4c92-8ea9-f0f329c6ab93-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93\") " pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.529662 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9ec49b55-9814-4053-a0dd-eda5b7f7995a-config-data-default\") pod \"openstack-galera-0\" (UID: \"9ec49b55-9814-4053-a0dd-eda5b7f7995a\") " pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.630987 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b466ebf1-ec52-4c92-8ea9-f0f329c6ab93-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93\") " pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.631052 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9ec49b55-9814-4053-a0dd-eda5b7f7995a-config-data-generated\") pod \"openstack-galera-0\" (UID: \"9ec49b55-9814-4053-a0dd-eda5b7f7995a\") " pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.631077 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/b466ebf1-ec52-4c92-8ea9-f0f329c6ab93-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93\") " pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.631108 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/9ec49b55-9814-4053-a0dd-eda5b7f7995a-secrets\") pod \"openstack-galera-0\" (UID: \"9ec49b55-9814-4053-a0dd-eda5b7f7995a\") " pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.631125 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b466ebf1-ec52-4c92-8ea9-f0f329c6ab93-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93\") " pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.631141 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9ec49b55-9814-4053-a0dd-eda5b7f7995a-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"9ec49b55-9814-4053-a0dd-eda5b7f7995a\") " pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.631155 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b466ebf1-ec52-4c92-8ea9-f0f329c6ab93-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: 
\"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93\") " pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.631176 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-cell1-galera-0\" (UID: \"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93\") " pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.631200 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxlc4\" (UniqueName: \"kubernetes.io/projected/b466ebf1-ec52-4c92-8ea9-f0f329c6ab93-kube-api-access-fxlc4\") pod \"openstack-cell1-galera-0\" (UID: \"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93\") " pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.631225 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b466ebf1-ec52-4c92-8ea9-f0f329c6ab93-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93\") " pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.631244 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"9ec49b55-9814-4053-a0dd-eda5b7f7995a\") " pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.631260 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctgnl\" (UniqueName: \"kubernetes.io/projected/9ec49b55-9814-4053-a0dd-eda5b7f7995a-kube-api-access-ctgnl\") pod \"openstack-galera-0\" (UID: \"9ec49b55-9814-4053-a0dd-eda5b7f7995a\") " pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.631275 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9ec49b55-9814-4053-a0dd-eda5b7f7995a-kolla-config\") pod \"openstack-galera-0\" (UID: \"9ec49b55-9814-4053-a0dd-eda5b7f7995a\") " pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.631294 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b466ebf1-ec52-4c92-8ea9-f0f329c6ab93-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93\") " pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.631311 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9ec49b55-9814-4053-a0dd-eda5b7f7995a-config-data-default\") pod \"openstack-galera-0\" (UID: \"9ec49b55-9814-4053-a0dd-eda5b7f7995a\") " pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.631340 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ec49b55-9814-4053-a0dd-eda5b7f7995a-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"9ec49b55-9814-4053-a0dd-eda5b7f7995a\") " pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.631353 4899 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b466ebf1-ec52-4c92-8ea9-f0f329c6ab93-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93\") " pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.631374 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ec49b55-9814-4053-a0dd-eda5b7f7995a-operator-scripts\") pod \"openstack-galera-0\" (UID: \"9ec49b55-9814-4053-a0dd-eda5b7f7995a\") " pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.632560 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ec49b55-9814-4053-a0dd-eda5b7f7995a-operator-scripts\") pod \"openstack-galera-0\" (UID: \"9ec49b55-9814-4053-a0dd-eda5b7f7995a\") " pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.633327 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b466ebf1-ec52-4c92-8ea9-f0f329c6ab93-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93\") " pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.633531 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9ec49b55-9814-4053-a0dd-eda5b7f7995a-config-data-generated\") pod \"openstack-galera-0\" (UID: \"9ec49b55-9814-4053-a0dd-eda5b7f7995a\") " pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.637980 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b466ebf1-ec52-4c92-8ea9-f0f329c6ab93-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93\") " pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.638444 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9ec49b55-9814-4053-a0dd-eda5b7f7995a-kolla-config\") pod \"openstack-galera-0\" (UID: \"9ec49b55-9814-4053-a0dd-eda5b7f7995a\") " pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.638719 4899 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"9ec49b55-9814-4053-a0dd-eda5b7f7995a\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.640790 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9ec49b55-9814-4053-a0dd-eda5b7f7995a-config-data-default\") pod \"openstack-galera-0\" (UID: \"9ec49b55-9814-4053-a0dd-eda5b7f7995a\") " pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.640964 4899 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-cell1-galera-0\" 
(UID: \"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.644242 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b466ebf1-ec52-4c92-8ea9-f0f329c6ab93-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93\") " pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.644656 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b466ebf1-ec52-4c92-8ea9-f0f329c6ab93-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93\") " pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.646360 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9ec49b55-9814-4053-a0dd-eda5b7f7995a-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"9ec49b55-9814-4053-a0dd-eda5b7f7995a\") " pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.647777 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/9ec49b55-9814-4053-a0dd-eda5b7f7995a-secrets\") pod \"openstack-galera-0\" (UID: \"9ec49b55-9814-4053-a0dd-eda5b7f7995a\") " pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.648197 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b466ebf1-ec52-4c92-8ea9-f0f329c6ab93-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93\") " pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.661370 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/b466ebf1-ec52-4c92-8ea9-f0f329c6ab93-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93\") " pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.665464 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ec49b55-9814-4053-a0dd-eda5b7f7995a-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"9ec49b55-9814-4053-a0dd-eda5b7f7995a\") " pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.672668 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b466ebf1-ec52-4c92-8ea9-f0f329c6ab93-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93\") " pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.685310 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctgnl\" (UniqueName: \"kubernetes.io/projected/9ec49b55-9814-4053-a0dd-eda5b7f7995a-kube-api-access-ctgnl\") pod \"openstack-galera-0\" (UID: \"9ec49b55-9814-4053-a0dd-eda5b7f7995a\") " pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.705978 4899 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-fxlc4\" (UniqueName: \"kubernetes.io/projected/b466ebf1-ec52-4c92-8ea9-f0f329c6ab93-kube-api-access-fxlc4\") pod \"openstack-cell1-galera-0\" (UID: \"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93\") " pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.730096 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-cell1-galera-0\" (UID: \"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93\") " pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.751055 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"9ec49b55-9814-4053-a0dd-eda5b7f7995a\") " pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.789497 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 03 08:54:47 crc kubenswrapper[4899]: I1003 08:54:47.823423 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 03 08:54:48 crc kubenswrapper[4899]: I1003 08:54:48.066258 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Oct 03 08:54:48 crc kubenswrapper[4899]: I1003 08:54:48.067537 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 03 08:54:48 crc kubenswrapper[4899]: I1003 08:54:48.069410 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Oct 03 08:54:48 crc kubenswrapper[4899]: I1003 08:54:48.069632 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-ftk5x" Oct 03 08:54:48 crc kubenswrapper[4899]: I1003 08:54:48.069860 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Oct 03 08:54:48 crc kubenswrapper[4899]: I1003 08:54:48.082978 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 03 08:54:48 crc kubenswrapper[4899]: I1003 08:54:48.240213 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b75c7\" (UniqueName: \"kubernetes.io/projected/231e3958-c17a-4f0b-a83e-4801b497b942-kube-api-access-b75c7\") pod \"memcached-0\" (UID: \"231e3958-c17a-4f0b-a83e-4801b497b942\") " pod="openstack/memcached-0" Oct 03 08:54:48 crc kubenswrapper[4899]: I1003 08:54:48.240450 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/231e3958-c17a-4f0b-a83e-4801b497b942-memcached-tls-certs\") pod \"memcached-0\" (UID: \"231e3958-c17a-4f0b-a83e-4801b497b942\") " pod="openstack/memcached-0" Oct 03 08:54:48 crc kubenswrapper[4899]: I1003 08:54:48.240537 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/231e3958-c17a-4f0b-a83e-4801b497b942-config-data\") pod \"memcached-0\" (UID: \"231e3958-c17a-4f0b-a83e-4801b497b942\") " pod="openstack/memcached-0" Oct 03 08:54:48 crc kubenswrapper[4899]: I1003 08:54:48.240566 4899 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/231e3958-c17a-4f0b-a83e-4801b497b942-kolla-config\") pod \"memcached-0\" (UID: \"231e3958-c17a-4f0b-a83e-4801b497b942\") " pod="openstack/memcached-0" Oct 03 08:54:48 crc kubenswrapper[4899]: I1003 08:54:48.240631 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/231e3958-c17a-4f0b-a83e-4801b497b942-combined-ca-bundle\") pod \"memcached-0\" (UID: \"231e3958-c17a-4f0b-a83e-4801b497b942\") " pod="openstack/memcached-0" Oct 03 08:54:48 crc kubenswrapper[4899]: I1003 08:54:48.342189 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/231e3958-c17a-4f0b-a83e-4801b497b942-config-data\") pod \"memcached-0\" (UID: \"231e3958-c17a-4f0b-a83e-4801b497b942\") " pod="openstack/memcached-0" Oct 03 08:54:48 crc kubenswrapper[4899]: I1003 08:54:48.342256 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/231e3958-c17a-4f0b-a83e-4801b497b942-kolla-config\") pod \"memcached-0\" (UID: \"231e3958-c17a-4f0b-a83e-4801b497b942\") " pod="openstack/memcached-0" Oct 03 08:54:48 crc kubenswrapper[4899]: I1003 08:54:48.342293 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/231e3958-c17a-4f0b-a83e-4801b497b942-combined-ca-bundle\") pod \"memcached-0\" (UID: \"231e3958-c17a-4f0b-a83e-4801b497b942\") " pod="openstack/memcached-0" Oct 03 08:54:48 crc kubenswrapper[4899]: I1003 08:54:48.342368 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b75c7\" (UniqueName: \"kubernetes.io/projected/231e3958-c17a-4f0b-a83e-4801b497b942-kube-api-access-b75c7\") pod \"memcached-0\" (UID: \"231e3958-c17a-4f0b-a83e-4801b497b942\") " pod="openstack/memcached-0" Oct 03 08:54:48 crc kubenswrapper[4899]: I1003 08:54:48.342410 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/231e3958-c17a-4f0b-a83e-4801b497b942-memcached-tls-certs\") pod \"memcached-0\" (UID: \"231e3958-c17a-4f0b-a83e-4801b497b942\") " pod="openstack/memcached-0" Oct 03 08:54:48 crc kubenswrapper[4899]: I1003 08:54:48.343334 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/231e3958-c17a-4f0b-a83e-4801b497b942-config-data\") pod \"memcached-0\" (UID: \"231e3958-c17a-4f0b-a83e-4801b497b942\") " pod="openstack/memcached-0" Oct 03 08:54:48 crc kubenswrapper[4899]: I1003 08:54:48.343374 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/231e3958-c17a-4f0b-a83e-4801b497b942-kolla-config\") pod \"memcached-0\" (UID: \"231e3958-c17a-4f0b-a83e-4801b497b942\") " pod="openstack/memcached-0" Oct 03 08:54:48 crc kubenswrapper[4899]: I1003 08:54:48.347218 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/231e3958-c17a-4f0b-a83e-4801b497b942-combined-ca-bundle\") pod \"memcached-0\" (UID: \"231e3958-c17a-4f0b-a83e-4801b497b942\") " pod="openstack/memcached-0" Oct 03 08:54:48 crc kubenswrapper[4899]: I1003 08:54:48.361764 4899 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b75c7\" (UniqueName: \"kubernetes.io/projected/231e3958-c17a-4f0b-a83e-4801b497b942-kube-api-access-b75c7\") pod \"memcached-0\" (UID: \"231e3958-c17a-4f0b-a83e-4801b497b942\") " pod="openstack/memcached-0" Oct 03 08:54:48 crc kubenswrapper[4899]: I1003 08:54:48.371345 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/231e3958-c17a-4f0b-a83e-4801b497b942-memcached-tls-certs\") pod \"memcached-0\" (UID: \"231e3958-c17a-4f0b-a83e-4801b497b942\") " pod="openstack/memcached-0" Oct 03 08:54:48 crc kubenswrapper[4899]: I1003 08:54:48.389172 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 03 08:54:50 crc kubenswrapper[4899]: I1003 08:54:50.012629 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 08:54:50 crc kubenswrapper[4899]: I1003 08:54:50.013872 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 03 08:54:50 crc kubenswrapper[4899]: I1003 08:54:50.016346 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-w2dhh" Oct 03 08:54:50 crc kubenswrapper[4899]: I1003 08:54:50.025958 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 08:54:50 crc kubenswrapper[4899]: I1003 08:54:50.167133 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcppj\" (UniqueName: \"kubernetes.io/projected/d6a2ca35-de4c-429e-9217-5047b31741ad-kube-api-access-lcppj\") pod \"kube-state-metrics-0\" (UID: \"d6a2ca35-de4c-429e-9217-5047b31741ad\") " pod="openstack/kube-state-metrics-0" Oct 03 08:54:50 crc kubenswrapper[4899]: I1003 08:54:50.269337 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcppj\" (UniqueName: \"kubernetes.io/projected/d6a2ca35-de4c-429e-9217-5047b31741ad-kube-api-access-lcppj\") pod \"kube-state-metrics-0\" (UID: \"d6a2ca35-de4c-429e-9217-5047b31741ad\") " pod="openstack/kube-state-metrics-0" Oct 03 08:54:50 crc kubenswrapper[4899]: I1003 08:54:50.287645 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcppj\" (UniqueName: \"kubernetes.io/projected/d6a2ca35-de4c-429e-9217-5047b31741ad-kube-api-access-lcppj\") pod \"kube-state-metrics-0\" (UID: \"d6a2ca35-de4c-429e-9217-5047b31741ad\") " pod="openstack/kube-state-metrics-0" Oct 03 08:54:50 crc kubenswrapper[4899]: I1003 08:54:50.334856 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 03 08:54:51 crc kubenswrapper[4899]: W1003 08:54:51.968091 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2d0a71f9_b4af_49f7_b2fe_267a78a4c086.slice/crio-f984cd2bb0c3cb28c24c9c2b00efebfa432b4d1fd873bd0163cd5a7cf016ad32 WatchSource:0}: Error finding container f984cd2bb0c3cb28c24c9c2b00efebfa432b4d1fd873bd0163cd5a7cf016ad32: Status 404 returned error can't find the container with id f984cd2bb0c3cb28c24c9c2b00efebfa432b4d1fd873bd0163cd5a7cf016ad32 Oct 03 08:54:51 crc kubenswrapper[4899]: I1003 08:54:51.986733 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cff2733b-858c-4578-abcb-a0c503b556d3","Type":"ContainerStarted","Data":"7860aa71d0506d14566487d585a11b18d34ca470279e1e5ccd56971a85ab844c"} Oct 03 08:54:51 crc kubenswrapper[4899]: I1003 08:54:51.987590 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"2d0a71f9-b4af-49f7-b2fe-267a78a4c086","Type":"ContainerStarted","Data":"f984cd2bb0c3cb28c24c9c2b00efebfa432b4d1fd873bd0163cd5a7cf016ad32"} Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.649448 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-wfz45"] Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.653008 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-wfz45" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.658128 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.658353 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.658455 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-w94wc" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.658549 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-wfz45"] Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.670711 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-c7dff"] Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.672675 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-c7dff" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.694422 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-c7dff"] Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.830934 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/89364578-24ad-4c19-8e0b-ba123f58f4eb-scripts\") pod \"ovn-controller-wfz45\" (UID: \"89364578-24ad-4c19-8e0b-ba123f58f4eb\") " pod="openstack/ovn-controller-wfz45" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.831094 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/89364578-24ad-4c19-8e0b-ba123f58f4eb-var-log-ovn\") pod \"ovn-controller-wfz45\" (UID: \"89364578-24ad-4c19-8e0b-ba123f58f4eb\") " pod="openstack/ovn-controller-wfz45" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.831168 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/a14a6054-9c4a-414f-ab4e-b0732e33ce1c-var-lib\") pod \"ovn-controller-ovs-c7dff\" (UID: \"a14a6054-9c4a-414f-ab4e-b0732e33ce1c\") " pod="openstack/ovn-controller-ovs-c7dff" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.831191 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a14a6054-9c4a-414f-ab4e-b0732e33ce1c-scripts\") pod \"ovn-controller-ovs-c7dff\" (UID: \"a14a6054-9c4a-414f-ab4e-b0732e33ce1c\") " pod="openstack/ovn-controller-ovs-c7dff" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.831280 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkr8r\" (UniqueName: \"kubernetes.io/projected/89364578-24ad-4c19-8e0b-ba123f58f4eb-kube-api-access-nkr8r\") pod \"ovn-controller-wfz45\" (UID: \"89364578-24ad-4c19-8e0b-ba123f58f4eb\") " pod="openstack/ovn-controller-wfz45" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.831322 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgdmc\" (UniqueName: \"kubernetes.io/projected/a14a6054-9c4a-414f-ab4e-b0732e33ce1c-kube-api-access-vgdmc\") pod \"ovn-controller-ovs-c7dff\" (UID: \"a14a6054-9c4a-414f-ab4e-b0732e33ce1c\") " pod="openstack/ovn-controller-ovs-c7dff" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.831378 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/89364578-24ad-4c19-8e0b-ba123f58f4eb-var-run\") pod \"ovn-controller-wfz45\" (UID: \"89364578-24ad-4c19-8e0b-ba123f58f4eb\") " pod="openstack/ovn-controller-wfz45" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.831430 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/a14a6054-9c4a-414f-ab4e-b0732e33ce1c-var-log\") pod \"ovn-controller-ovs-c7dff\" (UID: \"a14a6054-9c4a-414f-ab4e-b0732e33ce1c\") " pod="openstack/ovn-controller-ovs-c7dff" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.831467 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/89364578-24ad-4c19-8e0b-ba123f58f4eb-var-run-ovn\") pod \"ovn-controller-wfz45\" (UID: \"89364578-24ad-4c19-8e0b-ba123f58f4eb\") " pod="openstack/ovn-controller-wfz45" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.831486 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89364578-24ad-4c19-8e0b-ba123f58f4eb-combined-ca-bundle\") pod \"ovn-controller-wfz45\" (UID: \"89364578-24ad-4c19-8e0b-ba123f58f4eb\") " pod="openstack/ovn-controller-wfz45" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.831524 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a14a6054-9c4a-414f-ab4e-b0732e33ce1c-var-run\") pod \"ovn-controller-ovs-c7dff\" (UID: \"a14a6054-9c4a-414f-ab4e-b0732e33ce1c\") " pod="openstack/ovn-controller-ovs-c7dff" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.831548 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/a14a6054-9c4a-414f-ab4e-b0732e33ce1c-etc-ovs\") pod \"ovn-controller-ovs-c7dff\" (UID: \"a14a6054-9c4a-414f-ab4e-b0732e33ce1c\") " pod="openstack/ovn-controller-ovs-c7dff" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.831572 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/89364578-24ad-4c19-8e0b-ba123f58f4eb-ovn-controller-tls-certs\") pod \"ovn-controller-wfz45\" (UID: \"89364578-24ad-4c19-8e0b-ba123f58f4eb\") " pod="openstack/ovn-controller-wfz45" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.932872 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/89364578-24ad-4c19-8e0b-ba123f58f4eb-var-log-ovn\") pod \"ovn-controller-wfz45\" (UID: \"89364578-24ad-4c19-8e0b-ba123f58f4eb\") " pod="openstack/ovn-controller-wfz45" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.932960 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/a14a6054-9c4a-414f-ab4e-b0732e33ce1c-var-lib\") pod \"ovn-controller-ovs-c7dff\" (UID: \"a14a6054-9c4a-414f-ab4e-b0732e33ce1c\") " pod="openstack/ovn-controller-ovs-c7dff" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.932983 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a14a6054-9c4a-414f-ab4e-b0732e33ce1c-scripts\") pod \"ovn-controller-ovs-c7dff\" (UID: \"a14a6054-9c4a-414f-ab4e-b0732e33ce1c\") " pod="openstack/ovn-controller-ovs-c7dff" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.933024 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkr8r\" (UniqueName: \"kubernetes.io/projected/89364578-24ad-4c19-8e0b-ba123f58f4eb-kube-api-access-nkr8r\") pod \"ovn-controller-wfz45\" (UID: \"89364578-24ad-4c19-8e0b-ba123f58f4eb\") " pod="openstack/ovn-controller-wfz45" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.933052 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgdmc\" (UniqueName: \"kubernetes.io/projected/a14a6054-9c4a-414f-ab4e-b0732e33ce1c-kube-api-access-vgdmc\") pod 
\"ovn-controller-ovs-c7dff\" (UID: \"a14a6054-9c4a-414f-ab4e-b0732e33ce1c\") " pod="openstack/ovn-controller-ovs-c7dff" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.933092 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/89364578-24ad-4c19-8e0b-ba123f58f4eb-var-run\") pod \"ovn-controller-wfz45\" (UID: \"89364578-24ad-4c19-8e0b-ba123f58f4eb\") " pod="openstack/ovn-controller-wfz45" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.933126 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/a14a6054-9c4a-414f-ab4e-b0732e33ce1c-var-log\") pod \"ovn-controller-ovs-c7dff\" (UID: \"a14a6054-9c4a-414f-ab4e-b0732e33ce1c\") " pod="openstack/ovn-controller-ovs-c7dff" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.933150 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/89364578-24ad-4c19-8e0b-ba123f58f4eb-var-run-ovn\") pod \"ovn-controller-wfz45\" (UID: \"89364578-24ad-4c19-8e0b-ba123f58f4eb\") " pod="openstack/ovn-controller-wfz45" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.933175 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89364578-24ad-4c19-8e0b-ba123f58f4eb-combined-ca-bundle\") pod \"ovn-controller-wfz45\" (UID: \"89364578-24ad-4c19-8e0b-ba123f58f4eb\") " pod="openstack/ovn-controller-wfz45" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.933207 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a14a6054-9c4a-414f-ab4e-b0732e33ce1c-var-run\") pod \"ovn-controller-ovs-c7dff\" (UID: \"a14a6054-9c4a-414f-ab4e-b0732e33ce1c\") " pod="openstack/ovn-controller-ovs-c7dff" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.933226 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/a14a6054-9c4a-414f-ab4e-b0732e33ce1c-etc-ovs\") pod \"ovn-controller-ovs-c7dff\" (UID: \"a14a6054-9c4a-414f-ab4e-b0732e33ce1c\") " pod="openstack/ovn-controller-ovs-c7dff" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.933250 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/89364578-24ad-4c19-8e0b-ba123f58f4eb-ovn-controller-tls-certs\") pod \"ovn-controller-wfz45\" (UID: \"89364578-24ad-4c19-8e0b-ba123f58f4eb\") " pod="openstack/ovn-controller-wfz45" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.933291 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/89364578-24ad-4c19-8e0b-ba123f58f4eb-scripts\") pod \"ovn-controller-wfz45\" (UID: \"89364578-24ad-4c19-8e0b-ba123f58f4eb\") " pod="openstack/ovn-controller-wfz45" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.935125 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/89364578-24ad-4c19-8e0b-ba123f58f4eb-var-run\") pod \"ovn-controller-wfz45\" (UID: \"89364578-24ad-4c19-8e0b-ba123f58f4eb\") " pod="openstack/ovn-controller-wfz45" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.935293 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/89364578-24ad-4c19-8e0b-ba123f58f4eb-var-log-ovn\") pod \"ovn-controller-wfz45\" (UID: \"89364578-24ad-4c19-8e0b-ba123f58f4eb\") " pod="openstack/ovn-controller-wfz45" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.935422 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/89364578-24ad-4c19-8e0b-ba123f58f4eb-var-run-ovn\") pod \"ovn-controller-wfz45\" (UID: \"89364578-24ad-4c19-8e0b-ba123f58f4eb\") " pod="openstack/ovn-controller-wfz45" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.935457 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a14a6054-9c4a-414f-ab4e-b0732e33ce1c-var-run\") pod \"ovn-controller-ovs-c7dff\" (UID: \"a14a6054-9c4a-414f-ab4e-b0732e33ce1c\") " pod="openstack/ovn-controller-ovs-c7dff" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.935478 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/a14a6054-9c4a-414f-ab4e-b0732e33ce1c-var-lib\") pod \"ovn-controller-ovs-c7dff\" (UID: \"a14a6054-9c4a-414f-ab4e-b0732e33ce1c\") " pod="openstack/ovn-controller-ovs-c7dff" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.935490 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/a14a6054-9c4a-414f-ab4e-b0732e33ce1c-etc-ovs\") pod \"ovn-controller-ovs-c7dff\" (UID: \"a14a6054-9c4a-414f-ab4e-b0732e33ce1c\") " pod="openstack/ovn-controller-ovs-c7dff" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.935767 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/89364578-24ad-4c19-8e0b-ba123f58f4eb-scripts\") pod \"ovn-controller-wfz45\" (UID: \"89364578-24ad-4c19-8e0b-ba123f58f4eb\") " pod="openstack/ovn-controller-wfz45" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.936323 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/a14a6054-9c4a-414f-ab4e-b0732e33ce1c-var-log\") pod \"ovn-controller-ovs-c7dff\" (UID: \"a14a6054-9c4a-414f-ab4e-b0732e33ce1c\") " pod="openstack/ovn-controller-ovs-c7dff" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.937265 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a14a6054-9c4a-414f-ab4e-b0732e33ce1c-scripts\") pod \"ovn-controller-ovs-c7dff\" (UID: \"a14a6054-9c4a-414f-ab4e-b0732e33ce1c\") " pod="openstack/ovn-controller-ovs-c7dff" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.942064 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89364578-24ad-4c19-8e0b-ba123f58f4eb-combined-ca-bundle\") pod \"ovn-controller-wfz45\" (UID: \"89364578-24ad-4c19-8e0b-ba123f58f4eb\") " pod="openstack/ovn-controller-wfz45" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.942463 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/89364578-24ad-4c19-8e0b-ba123f58f4eb-ovn-controller-tls-certs\") pod \"ovn-controller-wfz45\" (UID: \"89364578-24ad-4c19-8e0b-ba123f58f4eb\") " pod="openstack/ovn-controller-wfz45" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.951146 4899 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkr8r\" (UniqueName: \"kubernetes.io/projected/89364578-24ad-4c19-8e0b-ba123f58f4eb-kube-api-access-nkr8r\") pod \"ovn-controller-wfz45\" (UID: \"89364578-24ad-4c19-8e0b-ba123f58f4eb\") " pod="openstack/ovn-controller-wfz45" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.964597 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vgdmc\" (UniqueName: \"kubernetes.io/projected/a14a6054-9c4a-414f-ab4e-b0732e33ce1c-kube-api-access-vgdmc\") pod \"ovn-controller-ovs-c7dff\" (UID: \"a14a6054-9c4a-414f-ab4e-b0732e33ce1c\") " pod="openstack/ovn-controller-ovs-c7dff" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.977434 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-wfz45" Oct 03 08:54:53 crc kubenswrapper[4899]: I1003 08:54:53.989640 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-c7dff" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.163727 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.165040 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.167195 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.167422 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.167635 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-9l278" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.167794 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.167931 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.175781 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.340302 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"6ad382d6-f0b8-43b2-aeea-98ace59fb6cf\") " pod="openstack/ovsdbserver-nb-0" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.340382 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ad382d6-f0b8-43b2-aeea-98ace59fb6cf-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"6ad382d6-f0b8-43b2-aeea-98ace59fb6cf\") " pod="openstack/ovsdbserver-nb-0" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.340417 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ad382d6-f0b8-43b2-aeea-98ace59fb6cf-config\") pod \"ovsdbserver-nb-0\" (UID: \"6ad382d6-f0b8-43b2-aeea-98ace59fb6cf\") " pod="openstack/ovsdbserver-nb-0" Oct 03 08:54:54 crc 
kubenswrapper[4899]: I1003 08:54:54.340454 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ad382d6-f0b8-43b2-aeea-98ace59fb6cf-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"6ad382d6-f0b8-43b2-aeea-98ace59fb6cf\") " pod="openstack/ovsdbserver-nb-0" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.340522 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6ad382d6-f0b8-43b2-aeea-98ace59fb6cf-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"6ad382d6-f0b8-43b2-aeea-98ace59fb6cf\") " pod="openstack/ovsdbserver-nb-0" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.340544 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2crf\" (UniqueName: \"kubernetes.io/projected/6ad382d6-f0b8-43b2-aeea-98ace59fb6cf-kube-api-access-v2crf\") pod \"ovsdbserver-nb-0\" (UID: \"6ad382d6-f0b8-43b2-aeea-98ace59fb6cf\") " pod="openstack/ovsdbserver-nb-0" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.340597 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ad382d6-f0b8-43b2-aeea-98ace59fb6cf-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"6ad382d6-f0b8-43b2-aeea-98ace59fb6cf\") " pod="openstack/ovsdbserver-nb-0" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.342242 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6ad382d6-f0b8-43b2-aeea-98ace59fb6cf-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"6ad382d6-f0b8-43b2-aeea-98ace59fb6cf\") " pod="openstack/ovsdbserver-nb-0" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.443711 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6ad382d6-f0b8-43b2-aeea-98ace59fb6cf-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"6ad382d6-f0b8-43b2-aeea-98ace59fb6cf\") " pod="openstack/ovsdbserver-nb-0" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.443819 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"6ad382d6-f0b8-43b2-aeea-98ace59fb6cf\") " pod="openstack/ovsdbserver-nb-0" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.443921 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ad382d6-f0b8-43b2-aeea-98ace59fb6cf-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"6ad382d6-f0b8-43b2-aeea-98ace59fb6cf\") " pod="openstack/ovsdbserver-nb-0" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.443959 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ad382d6-f0b8-43b2-aeea-98ace59fb6cf-config\") pod \"ovsdbserver-nb-0\" (UID: \"6ad382d6-f0b8-43b2-aeea-98ace59fb6cf\") " pod="openstack/ovsdbserver-nb-0" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.444020 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/6ad382d6-f0b8-43b2-aeea-98ace59fb6cf-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"6ad382d6-f0b8-43b2-aeea-98ace59fb6cf\") " pod="openstack/ovsdbserver-nb-0" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.444081 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6ad382d6-f0b8-43b2-aeea-98ace59fb6cf-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"6ad382d6-f0b8-43b2-aeea-98ace59fb6cf\") " pod="openstack/ovsdbserver-nb-0" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.444116 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2crf\" (UniqueName: \"kubernetes.io/projected/6ad382d6-f0b8-43b2-aeea-98ace59fb6cf-kube-api-access-v2crf\") pod \"ovsdbserver-nb-0\" (UID: \"6ad382d6-f0b8-43b2-aeea-98ace59fb6cf\") " pod="openstack/ovsdbserver-nb-0" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.444164 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ad382d6-f0b8-43b2-aeea-98ace59fb6cf-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"6ad382d6-f0b8-43b2-aeea-98ace59fb6cf\") " pod="openstack/ovsdbserver-nb-0" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.444215 4899 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"6ad382d6-f0b8-43b2-aeea-98ace59fb6cf\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/ovsdbserver-nb-0" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.444244 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6ad382d6-f0b8-43b2-aeea-98ace59fb6cf-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"6ad382d6-f0b8-43b2-aeea-98ace59fb6cf\") " pod="openstack/ovsdbserver-nb-0" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.444949 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ad382d6-f0b8-43b2-aeea-98ace59fb6cf-config\") pod \"ovsdbserver-nb-0\" (UID: \"6ad382d6-f0b8-43b2-aeea-98ace59fb6cf\") " pod="openstack/ovsdbserver-nb-0" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.445306 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6ad382d6-f0b8-43b2-aeea-98ace59fb6cf-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"6ad382d6-f0b8-43b2-aeea-98ace59fb6cf\") " pod="openstack/ovsdbserver-nb-0" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.453472 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ad382d6-f0b8-43b2-aeea-98ace59fb6cf-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"6ad382d6-f0b8-43b2-aeea-98ace59fb6cf\") " pod="openstack/ovsdbserver-nb-0" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.454392 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ad382d6-f0b8-43b2-aeea-98ace59fb6cf-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"6ad382d6-f0b8-43b2-aeea-98ace59fb6cf\") " pod="openstack/ovsdbserver-nb-0" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.454649 4899 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ad382d6-f0b8-43b2-aeea-98ace59fb6cf-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"6ad382d6-f0b8-43b2-aeea-98ace59fb6cf\") " pod="openstack/ovsdbserver-nb-0" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.461405 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2crf\" (UniqueName: \"kubernetes.io/projected/6ad382d6-f0b8-43b2-aeea-98ace59fb6cf-kube-api-access-v2crf\") pod \"ovsdbserver-nb-0\" (UID: \"6ad382d6-f0b8-43b2-aeea-98ace59fb6cf\") " pod="openstack/ovsdbserver-nb-0" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.478778 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"6ad382d6-f0b8-43b2-aeea-98ace59fb6cf\") " pod="openstack/ovsdbserver-nb-0" Oct 03 08:54:54 crc kubenswrapper[4899]: I1003 08:54:54.493301 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.740125 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.741882 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.744320 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-5lvl5" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.744489 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.744640 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.744641 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.752693 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.878415 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e11e1a1-7b97-4717-85bc-834b214d4526-config\") pod \"ovsdbserver-sb-0\" (UID: \"3e11e1a1-7b97-4717-85bc-834b214d4526\") " pod="openstack/ovsdbserver-sb-0" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.878479 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e11e1a1-7b97-4717-85bc-834b214d4526-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"3e11e1a1-7b97-4717-85bc-834b214d4526\") " pod="openstack/ovsdbserver-sb-0" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.878527 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3e11e1a1-7b97-4717-85bc-834b214d4526-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"3e11e1a1-7b97-4717-85bc-834b214d4526\") " pod="openstack/ovsdbserver-sb-0" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.878580 4899 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e11e1a1-7b97-4717-85bc-834b214d4526-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"3e11e1a1-7b97-4717-85bc-834b214d4526\") " pod="openstack/ovsdbserver-sb-0" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.878597 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2t5n\" (UniqueName: \"kubernetes.io/projected/3e11e1a1-7b97-4717-85bc-834b214d4526-kube-api-access-r2t5n\") pod \"ovsdbserver-sb-0\" (UID: \"3e11e1a1-7b97-4717-85bc-834b214d4526\") " pod="openstack/ovsdbserver-sb-0" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.878618 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"3e11e1a1-7b97-4717-85bc-834b214d4526\") " pod="openstack/ovsdbserver-sb-0" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.878690 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e11e1a1-7b97-4717-85bc-834b214d4526-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"3e11e1a1-7b97-4717-85bc-834b214d4526\") " pod="openstack/ovsdbserver-sb-0" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.878751 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e11e1a1-7b97-4717-85bc-834b214d4526-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"3e11e1a1-7b97-4717-85bc-834b214d4526\") " pod="openstack/ovsdbserver-sb-0" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.980182 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e11e1a1-7b97-4717-85bc-834b214d4526-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"3e11e1a1-7b97-4717-85bc-834b214d4526\") " pod="openstack/ovsdbserver-sb-0" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.980287 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e11e1a1-7b97-4717-85bc-834b214d4526-config\") pod \"ovsdbserver-sb-0\" (UID: \"3e11e1a1-7b97-4717-85bc-834b214d4526\") " pod="openstack/ovsdbserver-sb-0" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.980333 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e11e1a1-7b97-4717-85bc-834b214d4526-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"3e11e1a1-7b97-4717-85bc-834b214d4526\") " pod="openstack/ovsdbserver-sb-0" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.980361 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3e11e1a1-7b97-4717-85bc-834b214d4526-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"3e11e1a1-7b97-4717-85bc-834b214d4526\") " pod="openstack/ovsdbserver-sb-0" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.980409 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/3e11e1a1-7b97-4717-85bc-834b214d4526-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"3e11e1a1-7b97-4717-85bc-834b214d4526\") " pod="openstack/ovsdbserver-sb-0" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.980435 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2t5n\" (UniqueName: \"kubernetes.io/projected/3e11e1a1-7b97-4717-85bc-834b214d4526-kube-api-access-r2t5n\") pod \"ovsdbserver-sb-0\" (UID: \"3e11e1a1-7b97-4717-85bc-834b214d4526\") " pod="openstack/ovsdbserver-sb-0" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.980465 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"3e11e1a1-7b97-4717-85bc-834b214d4526\") " pod="openstack/ovsdbserver-sb-0" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.980491 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e11e1a1-7b97-4717-85bc-834b214d4526-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"3e11e1a1-7b97-4717-85bc-834b214d4526\") " pod="openstack/ovsdbserver-sb-0" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.981328 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3e11e1a1-7b97-4717-85bc-834b214d4526-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"3e11e1a1-7b97-4717-85bc-834b214d4526\") " pod="openstack/ovsdbserver-sb-0" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.981997 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e11e1a1-7b97-4717-85bc-834b214d4526-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"3e11e1a1-7b97-4717-85bc-834b214d4526\") " pod="openstack/ovsdbserver-sb-0" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.982696 4899 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"3e11e1a1-7b97-4717-85bc-834b214d4526\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/ovsdbserver-sb-0" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.985160 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e11e1a1-7b97-4717-85bc-834b214d4526-config\") pod \"ovsdbserver-sb-0\" (UID: \"3e11e1a1-7b97-4717-85bc-834b214d4526\") " pod="openstack/ovsdbserver-sb-0" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.985783 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e11e1a1-7b97-4717-85bc-834b214d4526-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"3e11e1a1-7b97-4717-85bc-834b214d4526\") " pod="openstack/ovsdbserver-sb-0" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.985790 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e11e1a1-7b97-4717-85bc-834b214d4526-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"3e11e1a1-7b97-4717-85bc-834b214d4526\") " pod="openstack/ovsdbserver-sb-0" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.987062 4899 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e11e1a1-7b97-4717-85bc-834b214d4526-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"3e11e1a1-7b97-4717-85bc-834b214d4526\") " pod="openstack/ovsdbserver-sb-0" Oct 03 08:54:56 crc kubenswrapper[4899]: I1003 08:54:56.999448 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2t5n\" (UniqueName: \"kubernetes.io/projected/3e11e1a1-7b97-4717-85bc-834b214d4526-kube-api-access-r2t5n\") pod \"ovsdbserver-sb-0\" (UID: \"3e11e1a1-7b97-4717-85bc-834b214d4526\") " pod="openstack/ovsdbserver-sb-0" Oct 03 08:54:57 crc kubenswrapper[4899]: I1003 08:54:57.003346 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"3e11e1a1-7b97-4717-85bc-834b214d4526\") " pod="openstack/ovsdbserver-sb-0" Oct 03 08:54:57 crc kubenswrapper[4899]: I1003 08:54:57.061115 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 03 08:55:01 crc kubenswrapper[4899]: I1003 08:55:01.870664 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 03 08:55:02 crc kubenswrapper[4899]: E1003 08:55:02.256372 4899 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 03 08:55:02 crc kubenswrapper[4899]: E1003 08:55:02.256616 4899 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-497xw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-6rx7j_openstack(11754825-8dda-4c30-81c7-bf8bc000600b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 03 08:55:02 crc kubenswrapper[4899]: E1003 08:55:02.257780 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-6rx7j" podUID="11754825-8dda-4c30-81c7-bf8bc000600b" Oct 03 08:55:02 crc kubenswrapper[4899]: E1003 08:55:02.261145 4899 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 03 08:55:02 crc kubenswrapper[4899]: E1003 08:55:02.261397 4899 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-f586d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-jgjl2_openstack(c605056d-832f-415a-abb2-a62b9f72a17f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 03 08:55:02 crc kubenswrapper[4899]: E1003 08:55:02.263403 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-jgjl2" podUID="c605056d-832f-415a-abb2-a62b9f72a17f" Oct 03 08:55:02 crc kubenswrapper[4899]: E1003 08:55:02.272595 4899 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 03 08:55:02 crc kubenswrapper[4899]: E1003 08:55:02.272771 4899 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rjphb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-kd4pl_openstack(cc3b51d4-8ad5-4106-9c0e-68fc133d1818): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 03 08:55:02 crc kubenswrapper[4899]: E1003 08:55:02.273972 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-kd4pl" podUID="cc3b51d4-8ad5-4106-9c0e-68fc133d1818" Oct 03 08:55:03 crc kubenswrapper[4899]: E1003 08:55:03.079716 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-jgjl2" podUID="c605056d-832f-415a-abb2-a62b9f72a17f" Oct 03 08:55:03 crc kubenswrapper[4899]: E1003 08:55:03.187251 4899 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 03 08:55:03 crc kubenswrapper[4899]: E1003 08:55:03.188028 4899 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-84gmk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-p2bpp_openstack(8e21bf72-5fd2-409e-9379-6ff3432aaf15): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 03 08:55:03 crc kubenswrapper[4899]: E1003 08:55:03.189424 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-p2bpp" podUID="8e21bf72-5fd2-409e-9379-6ff3432aaf15" Oct 03 08:55:03 crc kubenswrapper[4899]: I1003 08:55:03.421587 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-kd4pl" Oct 03 08:55:03 crc kubenswrapper[4899]: I1003 08:55:03.461254 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-6rx7j" Oct 03 08:55:03 crc kubenswrapper[4899]: I1003 08:55:03.589596 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-497xw\" (UniqueName: \"kubernetes.io/projected/11754825-8dda-4c30-81c7-bf8bc000600b-kube-api-access-497xw\") pod \"11754825-8dda-4c30-81c7-bf8bc000600b\" (UID: \"11754825-8dda-4c30-81c7-bf8bc000600b\") " Oct 03 08:55:03 crc kubenswrapper[4899]: I1003 08:55:03.589648 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11754825-8dda-4c30-81c7-bf8bc000600b-config\") pod \"11754825-8dda-4c30-81c7-bf8bc000600b\" (UID: \"11754825-8dda-4c30-81c7-bf8bc000600b\") " Oct 03 08:55:03 crc kubenswrapper[4899]: I1003 08:55:03.589680 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc3b51d4-8ad5-4106-9c0e-68fc133d1818-config\") pod \"cc3b51d4-8ad5-4106-9c0e-68fc133d1818\" (UID: \"cc3b51d4-8ad5-4106-9c0e-68fc133d1818\") " Oct 03 08:55:03 crc kubenswrapper[4899]: I1003 08:55:03.589760 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cc3b51d4-8ad5-4106-9c0e-68fc133d1818-dns-svc\") pod \"cc3b51d4-8ad5-4106-9c0e-68fc133d1818\" (UID: \"cc3b51d4-8ad5-4106-9c0e-68fc133d1818\") " Oct 03 08:55:03 crc kubenswrapper[4899]: I1003 08:55:03.589785 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rjphb\" (UniqueName: \"kubernetes.io/projected/cc3b51d4-8ad5-4106-9c0e-68fc133d1818-kube-api-access-rjphb\") pod \"cc3b51d4-8ad5-4106-9c0e-68fc133d1818\" (UID: \"cc3b51d4-8ad5-4106-9c0e-68fc133d1818\") " Oct 03 08:55:03 crc kubenswrapper[4899]: I1003 08:55:03.592355 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc3b51d4-8ad5-4106-9c0e-68fc133d1818-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "cc3b51d4-8ad5-4106-9c0e-68fc133d1818" (UID: "cc3b51d4-8ad5-4106-9c0e-68fc133d1818"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:55:03 crc kubenswrapper[4899]: I1003 08:55:03.592368 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11754825-8dda-4c30-81c7-bf8bc000600b-config" (OuterVolumeSpecName: "config") pod "11754825-8dda-4c30-81c7-bf8bc000600b" (UID: "11754825-8dda-4c30-81c7-bf8bc000600b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:55:03 crc kubenswrapper[4899]: I1003 08:55:03.592380 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc3b51d4-8ad5-4106-9c0e-68fc133d1818-config" (OuterVolumeSpecName: "config") pod "cc3b51d4-8ad5-4106-9c0e-68fc133d1818" (UID: "cc3b51d4-8ad5-4106-9c0e-68fc133d1818"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:55:03 crc kubenswrapper[4899]: I1003 08:55:03.594757 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc3b51d4-8ad5-4106-9c0e-68fc133d1818-kube-api-access-rjphb" (OuterVolumeSpecName: "kube-api-access-rjphb") pod "cc3b51d4-8ad5-4106-9c0e-68fc133d1818" (UID: "cc3b51d4-8ad5-4106-9c0e-68fc133d1818"). InnerVolumeSpecName "kube-api-access-rjphb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:55:03 crc kubenswrapper[4899]: I1003 08:55:03.595512 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11754825-8dda-4c30-81c7-bf8bc000600b-kube-api-access-497xw" (OuterVolumeSpecName: "kube-api-access-497xw") pod "11754825-8dda-4c30-81c7-bf8bc000600b" (UID: "11754825-8dda-4c30-81c7-bf8bc000600b"). InnerVolumeSpecName "kube-api-access-497xw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:55:03 crc kubenswrapper[4899]: I1003 08:55:03.691444 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-497xw\" (UniqueName: \"kubernetes.io/projected/11754825-8dda-4c30-81c7-bf8bc000600b-kube-api-access-497xw\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:03 crc kubenswrapper[4899]: I1003 08:55:03.691481 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11754825-8dda-4c30-81c7-bf8bc000600b-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:03 crc kubenswrapper[4899]: I1003 08:55:03.691493 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc3b51d4-8ad5-4106-9c0e-68fc133d1818-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:03 crc kubenswrapper[4899]: I1003 08:55:03.691504 4899 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cc3b51d4-8ad5-4106-9c0e-68fc133d1818-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:03 crc kubenswrapper[4899]: I1003 08:55:03.691514 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rjphb\" (UniqueName: \"kubernetes.io/projected/cc3b51d4-8ad5-4106-9c0e-68fc133d1818-kube-api-access-rjphb\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:03 crc kubenswrapper[4899]: I1003 08:55:03.715875 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 08:55:03 crc kubenswrapper[4899]: I1003 08:55:03.739683 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 03 08:55:03 crc kubenswrapper[4899]: W1003 08:55:03.743277 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9ec49b55_9814_4053_a0dd_eda5b7f7995a.slice/crio-0956e024a09a010b5dcb3e4fc5bde5f377fe4c4799e7c4cae40fcfcbf15e5bf5 WatchSource:0}: Error finding container 0956e024a09a010b5dcb3e4fc5bde5f377fe4c4799e7c4cae40fcfcbf15e5bf5: Status 404 returned error can't find the container with id 0956e024a09a010b5dcb3e4fc5bde5f377fe4c4799e7c4cae40fcfcbf15e5bf5 Oct 03 08:55:03 crc kubenswrapper[4899]: I1003 08:55:03.840019 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 03 08:55:03 crc kubenswrapper[4899]: I1003 08:55:03.856797 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-wfz45"] Oct 03 08:55:03 crc kubenswrapper[4899]: W1003 08:55:03.871574 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb466ebf1_ec52_4c92_8ea9_f0f329c6ab93.slice/crio-84d53789241af591d7565a57043ef580039b75e9d15e96edf283a0355531d023 WatchSource:0}: Error finding container 84d53789241af591d7565a57043ef580039b75e9d15e96edf283a0355531d023: Status 404 returned error can't find the container with id 84d53789241af591d7565a57043ef580039b75e9d15e96edf283a0355531d023 Oct 
03 08:55:03 crc kubenswrapper[4899]: I1003 08:55:03.926623 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 03 08:55:03 crc kubenswrapper[4899]: W1003 08:55:03.934174 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e11e1a1_7b97_4717_85bc_834b214d4526.slice/crio-aba108c494fd1c3d48968d283331673b4743f3aaa53f3f5ab60b9d52fdf5972f WatchSource:0}: Error finding container aba108c494fd1c3d48968d283331673b4743f3aaa53f3f5ab60b9d52fdf5972f: Status 404 returned error can't find the container with id aba108c494fd1c3d48968d283331673b4743f3aaa53f3f5ab60b9d52fdf5972f Oct 03 08:55:04 crc kubenswrapper[4899]: I1003 08:55:04.084974 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-wfz45" event={"ID":"89364578-24ad-4c19-8e0b-ba123f58f4eb","Type":"ContainerStarted","Data":"b767f1f4976f2a5cdf8da06e76b54ba67c80749a855a021412229fcf36a94af5"} Oct 03 08:55:04 crc kubenswrapper[4899]: I1003 08:55:04.086566 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d6a2ca35-de4c-429e-9217-5047b31741ad","Type":"ContainerStarted","Data":"8c3a5186ea5cdd8a0a57967437f4f3e24fbdffe56438a8088bf63b656f6bba3d"} Oct 03 08:55:04 crc kubenswrapper[4899]: I1003 08:55:04.088652 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"9ec49b55-9814-4053-a0dd-eda5b7f7995a","Type":"ContainerStarted","Data":"0956e024a09a010b5dcb3e4fc5bde5f377fe4c4799e7c4cae40fcfcbf15e5bf5"} Oct 03 08:55:04 crc kubenswrapper[4899]: I1003 08:55:04.089992 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-6rx7j" Oct 03 08:55:04 crc kubenswrapper[4899]: I1003 08:55:04.089992 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-6rx7j" event={"ID":"11754825-8dda-4c30-81c7-bf8bc000600b","Type":"ContainerDied","Data":"465869c501f3f34a593081a18294636badbbf9683efcce1c1bebbd1fcd60e783"} Oct 03 08:55:04 crc kubenswrapper[4899]: I1003 08:55:04.091075 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93","Type":"ContainerStarted","Data":"84d53789241af591d7565a57043ef580039b75e9d15e96edf283a0355531d023"} Oct 03 08:55:04 crc kubenswrapper[4899]: I1003 08:55:04.092017 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"231e3958-c17a-4f0b-a83e-4801b497b942","Type":"ContainerStarted","Data":"73bc44543661dbfcae7f5afdf854f3aec82c573cfdf0ef682aea223682ebf165"} Oct 03 08:55:04 crc kubenswrapper[4899]: I1003 08:55:04.093088 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"3e11e1a1-7b97-4717-85bc-834b214d4526","Type":"ContainerStarted","Data":"aba108c494fd1c3d48968d283331673b4743f3aaa53f3f5ab60b9d52fdf5972f"} Oct 03 08:55:04 crc kubenswrapper[4899]: I1003 08:55:04.095825 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-kd4pl" Oct 03 08:55:04 crc kubenswrapper[4899]: I1003 08:55:04.100043 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-kd4pl" event={"ID":"cc3b51d4-8ad5-4106-9c0e-68fc133d1818","Type":"ContainerDied","Data":"a5d5cc7c02f6f2b7feae63cf4013d998d62161546b5666088963cb2e5931d7c3"} Oct 03 08:55:04 crc kubenswrapper[4899]: E1003 08:55:04.100802 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-p2bpp" podUID="8e21bf72-5fd2-409e-9379-6ff3432aaf15" Oct 03 08:55:04 crc kubenswrapper[4899]: I1003 08:55:04.224737 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-kd4pl"] Oct 03 08:55:04 crc kubenswrapper[4899]: I1003 08:55:04.237327 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-kd4pl"] Oct 03 08:55:04 crc kubenswrapper[4899]: I1003 08:55:04.252353 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-6rx7j"] Oct 03 08:55:04 crc kubenswrapper[4899]: I1003 08:55:04.257996 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-6rx7j"] Oct 03 08:55:04 crc kubenswrapper[4899]: I1003 08:55:04.538725 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11754825-8dda-4c30-81c7-bf8bc000600b" path="/var/lib/kubelet/pods/11754825-8dda-4c30-81c7-bf8bc000600b/volumes" Oct 03 08:55:04 crc kubenswrapper[4899]: I1003 08:55:04.539225 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc3b51d4-8ad5-4106-9c0e-68fc133d1818" path="/var/lib/kubelet/pods/cc3b51d4-8ad5-4106-9c0e-68fc133d1818/volumes" Oct 03 08:55:04 crc kubenswrapper[4899]: I1003 08:55:04.775803 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-c7dff"] Oct 03 08:55:04 crc kubenswrapper[4899]: I1003 08:55:04.935627 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 03 08:55:05 crc kubenswrapper[4899]: W1003 08:55:05.087213 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6ad382d6_f0b8_43b2_aeea_98ace59fb6cf.slice/crio-c9ec6822df3a5147e04e6c477ee301cd4b1745c8ae6badefdee43100d56523a5 WatchSource:0}: Error finding container c9ec6822df3a5147e04e6c477ee301cd4b1745c8ae6badefdee43100d56523a5: Status 404 returned error can't find the container with id c9ec6822df3a5147e04e6c477ee301cd4b1745c8ae6badefdee43100d56523a5 Oct 03 08:55:05 crc kubenswrapper[4899]: W1003 08:55:05.091120 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda14a6054_9c4a_414f_ab4e_b0732e33ce1c.slice/crio-67b9eaaf44eda737273a49ed90998c0cb0d914bdf4d1dd988052b9c3994e3f0f WatchSource:0}: Error finding container 67b9eaaf44eda737273a49ed90998c0cb0d914bdf4d1dd988052b9c3994e3f0f: Status 404 returned error can't find the container with id 67b9eaaf44eda737273a49ed90998c0cb0d914bdf4d1dd988052b9c3994e3f0f Oct 03 08:55:05 crc kubenswrapper[4899]: I1003 08:55:05.104753 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" 
event={"ID":"6ad382d6-f0b8-43b2-aeea-98ace59fb6cf","Type":"ContainerStarted","Data":"c9ec6822df3a5147e04e6c477ee301cd4b1745c8ae6badefdee43100d56523a5"} Oct 03 08:55:05 crc kubenswrapper[4899]: I1003 08:55:05.105811 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-c7dff" event={"ID":"a14a6054-9c4a-414f-ab4e-b0732e33ce1c","Type":"ContainerStarted","Data":"67b9eaaf44eda737273a49ed90998c0cb0d914bdf4d1dd988052b9c3994e3f0f"} Oct 03 08:55:05 crc kubenswrapper[4899]: I1003 08:55:05.107731 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cff2733b-858c-4578-abcb-a0c503b556d3","Type":"ContainerStarted","Data":"ee34ae64a9b2b48f629d7d07e1e2606394e7e159388889c38dbf46c3b11f31cc"} Oct 03 08:55:05 crc kubenswrapper[4899]: I1003 08:55:05.109506 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"2d0a71f9-b4af-49f7-b2fe-267a78a4c086","Type":"ContainerStarted","Data":"473248a8c249a4ad67707f5370440c74ec966684cf8db9efdffc15efa23aea2f"} Oct 03 08:55:10 crc kubenswrapper[4899]: I1003 08:55:10.109057 4899 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 08:55:10 crc kubenswrapper[4899]: I1003 08:55:10.156056 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-c7dff" event={"ID":"a14a6054-9c4a-414f-ab4e-b0732e33ce1c","Type":"ContainerStarted","Data":"4f9d6cdb33674572553e74683d439c284a455cc8df99044a6a22c30ab0826bd8"} Oct 03 08:55:10 crc kubenswrapper[4899]: I1003 08:55:10.160745 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"231e3958-c17a-4f0b-a83e-4801b497b942","Type":"ContainerStarted","Data":"1c1307bd014ac2845676ff30eef937c5d7219bbb4d8d09abac066a7ffd7d8170"} Oct 03 08:55:10 crc kubenswrapper[4899]: I1003 08:55:10.161427 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Oct 03 08:55:10 crc kubenswrapper[4899]: I1003 08:55:10.167847 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"3e11e1a1-7b97-4717-85bc-834b214d4526","Type":"ContainerStarted","Data":"756df0ca4cf33bccd755b448e69d6dd91a02aba7bfdb1f048fd2902d62ef862a"} Oct 03 08:55:10 crc kubenswrapper[4899]: I1003 08:55:10.170596 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d6a2ca35-de4c-429e-9217-5047b31741ad","Type":"ContainerStarted","Data":"e5e5dd3eac5f7c8b5bb9c58edb0885647f3f7a07220ffd557c4891fa7ab8e36b"} Oct 03 08:55:10 crc kubenswrapper[4899]: I1003 08:55:10.170794 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 03 08:55:10 crc kubenswrapper[4899]: I1003 08:55:10.177169 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"9ec49b55-9814-4053-a0dd-eda5b7f7995a","Type":"ContainerStarted","Data":"f15297fa59e1e5b490d95a3acb1294ead34522ef4d5601058a0175152704a758"} Oct 03 08:55:10 crc kubenswrapper[4899]: I1003 08:55:10.184676 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93","Type":"ContainerStarted","Data":"14480569bcda0d1fe47768d80a99b7a18f9ad247dad08a96c59d5cc5bc61ed4f"} Oct 03 08:55:10 crc kubenswrapper[4899]: I1003 08:55:10.189086 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovsdbserver-nb-0" event={"ID":"6ad382d6-f0b8-43b2-aeea-98ace59fb6cf","Type":"ContainerStarted","Data":"10f0504b4fb610d7864982fb7a3eec4bd8231511f1a40db121cbd9ebea7250c0"} Oct 03 08:55:10 crc kubenswrapper[4899]: I1003 08:55:10.193532 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=19.776920648 podStartE2EDuration="22.19350812s" podCreationTimestamp="2025-10-03 08:54:48 +0000 UTC" firstStartedPulling="2025-10-03 08:55:03.166876762 +0000 UTC m=+877.274361715" lastFinishedPulling="2025-10-03 08:55:05.583464234 +0000 UTC m=+879.690949187" observedRunningTime="2025-10-03 08:55:10.19097577 +0000 UTC m=+884.298460733" watchObservedRunningTime="2025-10-03 08:55:10.19350812 +0000 UTC m=+884.300993083" Oct 03 08:55:10 crc kubenswrapper[4899]: I1003 08:55:10.219735 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=15.075469027 podStartE2EDuration="21.219708485s" podCreationTimestamp="2025-10-03 08:54:49 +0000 UTC" firstStartedPulling="2025-10-03 08:55:03.744125422 +0000 UTC m=+877.851610375" lastFinishedPulling="2025-10-03 08:55:09.88836488 +0000 UTC m=+883.995849833" observedRunningTime="2025-10-03 08:55:10.20332756 +0000 UTC m=+884.310812513" watchObservedRunningTime="2025-10-03 08:55:10.219708485 +0000 UTC m=+884.327193438" Oct 03 08:55:11 crc kubenswrapper[4899]: I1003 08:55:11.198050 4899 generic.go:334] "Generic (PLEG): container finished" podID="a14a6054-9c4a-414f-ab4e-b0732e33ce1c" containerID="4f9d6cdb33674572553e74683d439c284a455cc8df99044a6a22c30ab0826bd8" exitCode=0 Oct 03 08:55:11 crc kubenswrapper[4899]: I1003 08:55:11.198166 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-c7dff" event={"ID":"a14a6054-9c4a-414f-ab4e-b0732e33ce1c","Type":"ContainerDied","Data":"4f9d6cdb33674572553e74683d439c284a455cc8df99044a6a22c30ab0826bd8"} Oct 03 08:55:11 crc kubenswrapper[4899]: I1003 08:55:11.203970 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-wfz45" event={"ID":"89364578-24ad-4c19-8e0b-ba123f58f4eb","Type":"ContainerStarted","Data":"9be520a3ddb744c8317eb41941800ccb28aedf15dd723cd5d4cf34238f8fb532"} Oct 03 08:55:11 crc kubenswrapper[4899]: I1003 08:55:11.240746 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-wfz45" podStartSLOduration=12.284639183 podStartE2EDuration="18.240727963s" podCreationTimestamp="2025-10-03 08:54:53 +0000 UTC" firstStartedPulling="2025-10-03 08:55:03.863019118 +0000 UTC m=+877.970504071" lastFinishedPulling="2025-10-03 08:55:09.819107898 +0000 UTC m=+883.926592851" observedRunningTime="2025-10-03 08:55:11.234974202 +0000 UTC m=+885.342459155" watchObservedRunningTime="2025-10-03 08:55:11.240727963 +0000 UTC m=+885.348212916" Oct 03 08:55:12 crc kubenswrapper[4899]: I1003 08:55:12.198721 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 08:55:12 crc kubenswrapper[4899]: I1003 08:55:12.199600 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 08:55:12 crc kubenswrapper[4899]: I1003 08:55:12.218842 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-c7dff" event={"ID":"a14a6054-9c4a-414f-ab4e-b0732e33ce1c","Type":"ContainerStarted","Data":"aa1a190b8dcfcae314ec2c72b4f942a4db03bf7cc168fb01dda8aa697d70444b"} Oct 03 08:55:12 crc kubenswrapper[4899]: I1003 08:55:12.219498 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-wfz45" Oct 03 08:55:13 crc kubenswrapper[4899]: I1003 08:55:13.231363 4899 generic.go:334] "Generic (PLEG): container finished" podID="b466ebf1-ec52-4c92-8ea9-f0f329c6ab93" containerID="14480569bcda0d1fe47768d80a99b7a18f9ad247dad08a96c59d5cc5bc61ed4f" exitCode=0 Oct 03 08:55:13 crc kubenswrapper[4899]: I1003 08:55:13.231429 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93","Type":"ContainerDied","Data":"14480569bcda0d1fe47768d80a99b7a18f9ad247dad08a96c59d5cc5bc61ed4f"} Oct 03 08:55:13 crc kubenswrapper[4899]: I1003 08:55:13.235041 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"6ad382d6-f0b8-43b2-aeea-98ace59fb6cf","Type":"ContainerStarted","Data":"6ccd6b8b07352313faba37211c4551cb0c1bccbd2b84c348df0e7c2b70002090"} Oct 03 08:55:13 crc kubenswrapper[4899]: I1003 08:55:13.239148 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-c7dff" event={"ID":"a14a6054-9c4a-414f-ab4e-b0732e33ce1c","Type":"ContainerStarted","Data":"529300df8032e3e929989ba13108b3922b9b2fd70a6f9a0b229edbc7ef3342d5"} Oct 03 08:55:13 crc kubenswrapper[4899]: I1003 08:55:13.239884 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-c7dff" Oct 03 08:55:13 crc kubenswrapper[4899]: I1003 08:55:13.239935 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-c7dff" Oct 03 08:55:13 crc kubenswrapper[4899]: I1003 08:55:13.246563 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"3e11e1a1-7b97-4717-85bc-834b214d4526","Type":"ContainerStarted","Data":"1c3382e67569cbb4dca6f502414755a65f1d3dd6af2c778717076f1d465f6b45"} Oct 03 08:55:13 crc kubenswrapper[4899]: I1003 08:55:13.297955 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=9.585320296999999 podStartE2EDuration="18.297935276s" podCreationTimestamp="2025-10-03 08:54:55 +0000 UTC" firstStartedPulling="2025-10-03 08:55:03.936690408 +0000 UTC m=+878.044175351" lastFinishedPulling="2025-10-03 08:55:12.649305377 +0000 UTC m=+886.756790330" observedRunningTime="2025-10-03 08:55:13.294860109 +0000 UTC m=+887.402345062" watchObservedRunningTime="2025-10-03 08:55:13.297935276 +0000 UTC m=+887.405420229" Oct 03 08:55:13 crc kubenswrapper[4899]: I1003 08:55:13.321441 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-c7dff" podStartSLOduration=15.909577973 podStartE2EDuration="20.321422186s" podCreationTimestamp="2025-10-03 08:54:53 +0000 UTC" firstStartedPulling="2025-10-03 08:55:05.092764139 +0000 UTC m=+879.200249092" lastFinishedPulling="2025-10-03 08:55:09.504608352 +0000 UTC m=+883.612093305" observedRunningTime="2025-10-03 08:55:13.314307722 +0000 UTC m=+887.421792675" 
watchObservedRunningTime="2025-10-03 08:55:13.321422186 +0000 UTC m=+887.428907139" Oct 03 08:55:14 crc kubenswrapper[4899]: I1003 08:55:14.255353 4899 generic.go:334] "Generic (PLEG): container finished" podID="9ec49b55-9814-4053-a0dd-eda5b7f7995a" containerID="f15297fa59e1e5b490d95a3acb1294ead34522ef4d5601058a0175152704a758" exitCode=0 Oct 03 08:55:14 crc kubenswrapper[4899]: I1003 08:55:14.255436 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"9ec49b55-9814-4053-a0dd-eda5b7f7995a","Type":"ContainerDied","Data":"f15297fa59e1e5b490d95a3acb1294ead34522ef4d5601058a0175152704a758"} Oct 03 08:55:14 crc kubenswrapper[4899]: I1003 08:55:14.257528 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"b466ebf1-ec52-4c92-8ea9-f0f329c6ab93","Type":"ContainerStarted","Data":"5d70cdffd176e24600f17281fbf6f783d7e5c9497891781aac0545d213d5a26c"} Oct 03 08:55:14 crc kubenswrapper[4899]: I1003 08:55:14.278309 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=13.730928644 podStartE2EDuration="21.278290903s" podCreationTimestamp="2025-10-03 08:54:53 +0000 UTC" firstStartedPulling="2025-10-03 08:55:05.089675572 +0000 UTC m=+879.197160525" lastFinishedPulling="2025-10-03 08:55:12.637037821 +0000 UTC m=+886.744522784" observedRunningTime="2025-10-03 08:55:13.332732742 +0000 UTC m=+887.440217695" watchObservedRunningTime="2025-10-03 08:55:14.278290903 +0000 UTC m=+888.385775866" Oct 03 08:55:14 crc kubenswrapper[4899]: I1003 08:55:14.309807 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=22.682955275 podStartE2EDuration="28.309786525s" podCreationTimestamp="2025-10-03 08:54:46 +0000 UTC" firstStartedPulling="2025-10-03 08:55:03.877541195 +0000 UTC m=+877.985026148" lastFinishedPulling="2025-10-03 08:55:09.504372445 +0000 UTC m=+883.611857398" observedRunningTime="2025-10-03 08:55:14.300435541 +0000 UTC m=+888.407920504" watchObservedRunningTime="2025-10-03 08:55:14.309786525 +0000 UTC m=+888.417271478" Oct 03 08:55:14 crc kubenswrapper[4899]: I1003 08:55:14.494087 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.061870 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.098175 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.265622 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"9ec49b55-9814-4053-a0dd-eda5b7f7995a","Type":"ContainerStarted","Data":"c9d0989add4f4ff8ad781fe0a12a34e631ec1418ed0c91c4d6b4efa553e233ff"} Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.266113 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.286001 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=23.692280934 podStartE2EDuration="29.285985092s" podCreationTimestamp="2025-10-03 08:54:46 +0000 UTC" firstStartedPulling="2025-10-03 08:55:03.746460136 +0000 UTC m=+877.853945089" 
lastFinishedPulling="2025-10-03 08:55:09.340164294 +0000 UTC m=+883.447649247" observedRunningTime="2025-10-03 08:55:15.283472932 +0000 UTC m=+889.390957885" watchObservedRunningTime="2025-10-03 08:55:15.285985092 +0000 UTC m=+889.393470055" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.302587 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.493752 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.547244 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.566724 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-p2bpp"] Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.608498 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-2tb7q"] Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.610132 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-2tb7q" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.614171 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-w992v"] Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.615265 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-w992v" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.619232 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.619254 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.642065 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-2tb7q"] Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.658001 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-w992v"] Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.709019 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b632f7f4-455b-485d-87d2-6c9d7d5ea289-config\") pod \"dnsmasq-dns-6bc7876d45-2tb7q\" (UID: \"b632f7f4-455b-485d-87d2-6c9d7d5ea289\") " pod="openstack/dnsmasq-dns-6bc7876d45-2tb7q" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.709074 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/78c399de-31f5-439f-8f0b-24c8dba1875e-ovn-rundir\") pod \"ovn-controller-metrics-w992v\" (UID: \"78c399de-31f5-439f-8f0b-24c8dba1875e\") " pod="openstack/ovn-controller-metrics-w992v" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.709140 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b632f7f4-455b-485d-87d2-6c9d7d5ea289-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-2tb7q\" (UID: \"b632f7f4-455b-485d-87d2-6c9d7d5ea289\") " pod="openstack/dnsmasq-dns-6bc7876d45-2tb7q" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.709168 4899 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78c399de-31f5-439f-8f0b-24c8dba1875e-combined-ca-bundle\") pod \"ovn-controller-metrics-w992v\" (UID: \"78c399de-31f5-439f-8f0b-24c8dba1875e\") " pod="openstack/ovn-controller-metrics-w992v" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.709206 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gx6rk\" (UniqueName: \"kubernetes.io/projected/b632f7f4-455b-485d-87d2-6c9d7d5ea289-kube-api-access-gx6rk\") pod \"dnsmasq-dns-6bc7876d45-2tb7q\" (UID: \"b632f7f4-455b-485d-87d2-6c9d7d5ea289\") " pod="openstack/dnsmasq-dns-6bc7876d45-2tb7q" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.709246 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b632f7f4-455b-485d-87d2-6c9d7d5ea289-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-2tb7q\" (UID: \"b632f7f4-455b-485d-87d2-6c9d7d5ea289\") " pod="openstack/dnsmasq-dns-6bc7876d45-2tb7q" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.709283 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/78c399de-31f5-439f-8f0b-24c8dba1875e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-w992v\" (UID: \"78c399de-31f5-439f-8f0b-24c8dba1875e\") " pod="openstack/ovn-controller-metrics-w992v" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.709305 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/78c399de-31f5-439f-8f0b-24c8dba1875e-ovs-rundir\") pod \"ovn-controller-metrics-w992v\" (UID: \"78c399de-31f5-439f-8f0b-24c8dba1875e\") " pod="openstack/ovn-controller-metrics-w992v" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.709348 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78c399de-31f5-439f-8f0b-24c8dba1875e-config\") pod \"ovn-controller-metrics-w992v\" (UID: \"78c399de-31f5-439f-8f0b-24c8dba1875e\") " pod="openstack/ovn-controller-metrics-w992v" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.709381 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fltz5\" (UniqueName: \"kubernetes.io/projected/78c399de-31f5-439f-8f0b-24c8dba1875e-kube-api-access-fltz5\") pod \"ovn-controller-metrics-w992v\" (UID: \"78c399de-31f5-439f-8f0b-24c8dba1875e\") " pod="openstack/ovn-controller-metrics-w992v" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.813930 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b632f7f4-455b-485d-87d2-6c9d7d5ea289-config\") pod \"dnsmasq-dns-6bc7876d45-2tb7q\" (UID: \"b632f7f4-455b-485d-87d2-6c9d7d5ea289\") " pod="openstack/dnsmasq-dns-6bc7876d45-2tb7q" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.813988 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/78c399de-31f5-439f-8f0b-24c8dba1875e-ovn-rundir\") pod \"ovn-controller-metrics-w992v\" (UID: \"78c399de-31f5-439f-8f0b-24c8dba1875e\") " pod="openstack/ovn-controller-metrics-w992v" Oct 03 
08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.814036 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b632f7f4-455b-485d-87d2-6c9d7d5ea289-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-2tb7q\" (UID: \"b632f7f4-455b-485d-87d2-6c9d7d5ea289\") " pod="openstack/dnsmasq-dns-6bc7876d45-2tb7q" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.814055 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78c399de-31f5-439f-8f0b-24c8dba1875e-combined-ca-bundle\") pod \"ovn-controller-metrics-w992v\" (UID: \"78c399de-31f5-439f-8f0b-24c8dba1875e\") " pod="openstack/ovn-controller-metrics-w992v" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.814083 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gx6rk\" (UniqueName: \"kubernetes.io/projected/b632f7f4-455b-485d-87d2-6c9d7d5ea289-kube-api-access-gx6rk\") pod \"dnsmasq-dns-6bc7876d45-2tb7q\" (UID: \"b632f7f4-455b-485d-87d2-6c9d7d5ea289\") " pod="openstack/dnsmasq-dns-6bc7876d45-2tb7q" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.814116 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b632f7f4-455b-485d-87d2-6c9d7d5ea289-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-2tb7q\" (UID: \"b632f7f4-455b-485d-87d2-6c9d7d5ea289\") " pod="openstack/dnsmasq-dns-6bc7876d45-2tb7q" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.814146 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/78c399de-31f5-439f-8f0b-24c8dba1875e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-w992v\" (UID: \"78c399de-31f5-439f-8f0b-24c8dba1875e\") " pod="openstack/ovn-controller-metrics-w992v" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.814162 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/78c399de-31f5-439f-8f0b-24c8dba1875e-ovs-rundir\") pod \"ovn-controller-metrics-w992v\" (UID: \"78c399de-31f5-439f-8f0b-24c8dba1875e\") " pod="openstack/ovn-controller-metrics-w992v" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.814203 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78c399de-31f5-439f-8f0b-24c8dba1875e-config\") pod \"ovn-controller-metrics-w992v\" (UID: \"78c399de-31f5-439f-8f0b-24c8dba1875e\") " pod="openstack/ovn-controller-metrics-w992v" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.814231 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fltz5\" (UniqueName: \"kubernetes.io/projected/78c399de-31f5-439f-8f0b-24c8dba1875e-kube-api-access-fltz5\") pod \"ovn-controller-metrics-w992v\" (UID: \"78c399de-31f5-439f-8f0b-24c8dba1875e\") " pod="openstack/ovn-controller-metrics-w992v" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.815317 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b632f7f4-455b-485d-87d2-6c9d7d5ea289-config\") pod \"dnsmasq-dns-6bc7876d45-2tb7q\" (UID: \"b632f7f4-455b-485d-87d2-6c9d7d5ea289\") " pod="openstack/dnsmasq-dns-6bc7876d45-2tb7q" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.815733 4899 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/78c399de-31f5-439f-8f0b-24c8dba1875e-ovn-rundir\") pod \"ovn-controller-metrics-w992v\" (UID: \"78c399de-31f5-439f-8f0b-24c8dba1875e\") " pod="openstack/ovn-controller-metrics-w992v" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.815935 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b632f7f4-455b-485d-87d2-6c9d7d5ea289-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-2tb7q\" (UID: \"b632f7f4-455b-485d-87d2-6c9d7d5ea289\") " pod="openstack/dnsmasq-dns-6bc7876d45-2tb7q" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.816423 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b632f7f4-455b-485d-87d2-6c9d7d5ea289-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-2tb7q\" (UID: \"b632f7f4-455b-485d-87d2-6c9d7d5ea289\") " pod="openstack/dnsmasq-dns-6bc7876d45-2tb7q" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.821123 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/78c399de-31f5-439f-8f0b-24c8dba1875e-ovs-rundir\") pod \"ovn-controller-metrics-w992v\" (UID: \"78c399de-31f5-439f-8f0b-24c8dba1875e\") " pod="openstack/ovn-controller-metrics-w992v" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.821824 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78c399de-31f5-439f-8f0b-24c8dba1875e-config\") pod \"ovn-controller-metrics-w992v\" (UID: \"78c399de-31f5-439f-8f0b-24c8dba1875e\") " pod="openstack/ovn-controller-metrics-w992v" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.841460 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78c399de-31f5-439f-8f0b-24c8dba1875e-combined-ca-bundle\") pod \"ovn-controller-metrics-w992v\" (UID: \"78c399de-31f5-439f-8f0b-24c8dba1875e\") " pod="openstack/ovn-controller-metrics-w992v" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.845385 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fltz5\" (UniqueName: \"kubernetes.io/projected/78c399de-31f5-439f-8f0b-24c8dba1875e-kube-api-access-fltz5\") pod \"ovn-controller-metrics-w992v\" (UID: \"78c399de-31f5-439f-8f0b-24c8dba1875e\") " pod="openstack/ovn-controller-metrics-w992v" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.857479 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/78c399de-31f5-439f-8f0b-24c8dba1875e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-w992v\" (UID: \"78c399de-31f5-439f-8f0b-24c8dba1875e\") " pod="openstack/ovn-controller-metrics-w992v" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.861248 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gx6rk\" (UniqueName: \"kubernetes.io/projected/b632f7f4-455b-485d-87d2-6c9d7d5ea289-kube-api-access-gx6rk\") pod \"dnsmasq-dns-6bc7876d45-2tb7q\" (UID: \"b632f7f4-455b-485d-87d2-6c9d7d5ea289\") " pod="openstack/dnsmasq-dns-6bc7876d45-2tb7q" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.945413 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-jgjl2"] Oct 03 08:55:15 crc 
kubenswrapper[4899]: I1003 08:55:15.949308 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-2tb7q" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.968477 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-w992v" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.976455 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8554648995-mdfwg"] Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.977952 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-mdfwg" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.987502 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Oct 03 08:55:15 crc kubenswrapper[4899]: I1003 08:55:15.993656 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-mdfwg"] Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.016141 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-p2bpp" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.139392 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-84gmk\" (UniqueName: \"kubernetes.io/projected/8e21bf72-5fd2-409e-9379-6ff3432aaf15-kube-api-access-84gmk\") pod \"8e21bf72-5fd2-409e-9379-6ff3432aaf15\" (UID: \"8e21bf72-5fd2-409e-9379-6ff3432aaf15\") " Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.139797 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e21bf72-5fd2-409e-9379-6ff3432aaf15-config\") pod \"8e21bf72-5fd2-409e-9379-6ff3432aaf15\" (UID: \"8e21bf72-5fd2-409e-9379-6ff3432aaf15\") " Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.139860 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8e21bf72-5fd2-409e-9379-6ff3432aaf15-dns-svc\") pod \"8e21bf72-5fd2-409e-9379-6ff3432aaf15\" (UID: \"8e21bf72-5fd2-409e-9379-6ff3432aaf15\") " Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.140453 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e21bf72-5fd2-409e-9379-6ff3432aaf15-config" (OuterVolumeSpecName: "config") pod "8e21bf72-5fd2-409e-9379-6ff3432aaf15" (UID: "8e21bf72-5fd2-409e-9379-6ff3432aaf15"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.140555 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-mdfwg\" (UID: \"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12\") " pod="openstack/dnsmasq-dns-8554648995-mdfwg" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.140795 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e21bf72-5fd2-409e-9379-6ff3432aaf15-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8e21bf72-5fd2-409e-9379-6ff3432aaf15" (UID: "8e21bf72-5fd2-409e-9379-6ff3432aaf15"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.140826 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58j7n\" (UniqueName: \"kubernetes.io/projected/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-kube-api-access-58j7n\") pod \"dnsmasq-dns-8554648995-mdfwg\" (UID: \"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12\") " pod="openstack/dnsmasq-dns-8554648995-mdfwg" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.140860 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-config\") pod \"dnsmasq-dns-8554648995-mdfwg\" (UID: \"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12\") " pod="openstack/dnsmasq-dns-8554648995-mdfwg" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.140879 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-mdfwg\" (UID: \"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12\") " pod="openstack/dnsmasq-dns-8554648995-mdfwg" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.140933 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-dns-svc\") pod \"dnsmasq-dns-8554648995-mdfwg\" (UID: \"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12\") " pod="openstack/dnsmasq-dns-8554648995-mdfwg" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.141000 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e21bf72-5fd2-409e-9379-6ff3432aaf15-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.141013 4899 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8e21bf72-5fd2-409e-9379-6ff3432aaf15-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.144636 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e21bf72-5fd2-409e-9379-6ff3432aaf15-kube-api-access-84gmk" (OuterVolumeSpecName: "kube-api-access-84gmk") pod "8e21bf72-5fd2-409e-9379-6ff3432aaf15" (UID: "8e21bf72-5fd2-409e-9379-6ff3432aaf15"). InnerVolumeSpecName "kube-api-access-84gmk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.242867 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-mdfwg\" (UID: \"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12\") " pod="openstack/dnsmasq-dns-8554648995-mdfwg" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.244014 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-mdfwg\" (UID: \"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12\") " pod="openstack/dnsmasq-dns-8554648995-mdfwg" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.244136 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58j7n\" (UniqueName: \"kubernetes.io/projected/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-kube-api-access-58j7n\") pod \"dnsmasq-dns-8554648995-mdfwg\" (UID: \"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12\") " pod="openstack/dnsmasq-dns-8554648995-mdfwg" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.244167 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-config\") pod \"dnsmasq-dns-8554648995-mdfwg\" (UID: \"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12\") " pod="openstack/dnsmasq-dns-8554648995-mdfwg" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.244186 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-mdfwg\" (UID: \"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12\") " pod="openstack/dnsmasq-dns-8554648995-mdfwg" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.244232 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-dns-svc\") pod \"dnsmasq-dns-8554648995-mdfwg\" (UID: \"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12\") " pod="openstack/dnsmasq-dns-8554648995-mdfwg" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.244281 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-84gmk\" (UniqueName: \"kubernetes.io/projected/8e21bf72-5fd2-409e-9379-6ff3432aaf15-kube-api-access-84gmk\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.244875 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-dns-svc\") pod \"dnsmasq-dns-8554648995-mdfwg\" (UID: \"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12\") " pod="openstack/dnsmasq-dns-8554648995-mdfwg" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.245235 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-config\") pod \"dnsmasq-dns-8554648995-mdfwg\" (UID: \"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12\") " pod="openstack/dnsmasq-dns-8554648995-mdfwg" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.245545 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-mdfwg\" (UID: \"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12\") " pod="openstack/dnsmasq-dns-8554648995-mdfwg" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.261999 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58j7n\" (UniqueName: \"kubernetes.io/projected/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-kube-api-access-58j7n\") pod \"dnsmasq-dns-8554648995-mdfwg\" (UID: \"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12\") " pod="openstack/dnsmasq-dns-8554648995-mdfwg" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.273271 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-p2bpp" event={"ID":"8e21bf72-5fd2-409e-9379-6ff3432aaf15","Type":"ContainerDied","Data":"b333f8d28a133c9d8c66682b89c37fc290dd3c1734d7fa89249d79e543edad54"} Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.273368 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-p2bpp" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.328686 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-p2bpp"] Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.335463 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-p2bpp"] Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.335615 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.335805 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-mdfwg" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.413996 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-2tb7q"] Oct 03 08:55:16 crc kubenswrapper[4899]: W1003 08:55:16.416435 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb632f7f4_455b_485d_87d2_6c9d7d5ea289.slice/crio-c576eaf2c2d5bd719d63871bbd929ab690ab89f3cab2ba328cece824045c8c62 WatchSource:0}: Error finding container c576eaf2c2d5bd719d63871bbd929ab690ab89f3cab2ba328cece824045c8c62: Status 404 returned error can't find the container with id c576eaf2c2d5bd719d63871bbd929ab690ab89f3cab2ba328cece824045c8c62 Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.476613 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-w992v"] Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.514254 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.515665 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.517410 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.517579 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-ppptd" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.517697 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.517929 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.523706 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.541501 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e21bf72-5fd2-409e-9379-6ff3432aaf15" path="/var/lib/kubelet/pods/8e21bf72-5fd2-409e-9379-6ff3432aaf15/volumes" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.652745 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/71e79cb5-28f4-4102-892e-479502ff4db9-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"71e79cb5-28f4-4102-892e-479502ff4db9\") " pod="openstack/ovn-northd-0" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.653244 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/71e79cb5-28f4-4102-892e-479502ff4db9-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"71e79cb5-28f4-4102-892e-479502ff4db9\") " pod="openstack/ovn-northd-0" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.653311 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71e79cb5-28f4-4102-892e-479502ff4db9-config\") pod \"ovn-northd-0\" (UID: \"71e79cb5-28f4-4102-892e-479502ff4db9\") " pod="openstack/ovn-northd-0" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.653518 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e79cb5-28f4-4102-892e-479502ff4db9-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"71e79cb5-28f4-4102-892e-479502ff4db9\") " pod="openstack/ovn-northd-0" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.653582 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dctr4\" (UniqueName: \"kubernetes.io/projected/71e79cb5-28f4-4102-892e-479502ff4db9-kube-api-access-dctr4\") pod \"ovn-northd-0\" (UID: \"71e79cb5-28f4-4102-892e-479502ff4db9\") " pod="openstack/ovn-northd-0" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.653727 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/71e79cb5-28f4-4102-892e-479502ff4db9-scripts\") pod \"ovn-northd-0\" (UID: \"71e79cb5-28f4-4102-892e-479502ff4db9\") " pod="openstack/ovn-northd-0" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.653756 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/71e79cb5-28f4-4102-892e-479502ff4db9-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"71e79cb5-28f4-4102-892e-479502ff4db9\") " pod="openstack/ovn-northd-0" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.755265 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/71e79cb5-28f4-4102-892e-479502ff4db9-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"71e79cb5-28f4-4102-892e-479502ff4db9\") " pod="openstack/ovn-northd-0" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.755334 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/71e79cb5-28f4-4102-892e-479502ff4db9-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"71e79cb5-28f4-4102-892e-479502ff4db9\") " pod="openstack/ovn-northd-0" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.755399 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71e79cb5-28f4-4102-892e-479502ff4db9-config\") pod \"ovn-northd-0\" (UID: \"71e79cb5-28f4-4102-892e-479502ff4db9\") " pod="openstack/ovn-northd-0" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.755441 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e79cb5-28f4-4102-892e-479502ff4db9-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"71e79cb5-28f4-4102-892e-479502ff4db9\") " pod="openstack/ovn-northd-0" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.755473 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dctr4\" (UniqueName: \"kubernetes.io/projected/71e79cb5-28f4-4102-892e-479502ff4db9-kube-api-access-dctr4\") pod \"ovn-northd-0\" (UID: \"71e79cb5-28f4-4102-892e-479502ff4db9\") " pod="openstack/ovn-northd-0" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.755500 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/71e79cb5-28f4-4102-892e-479502ff4db9-scripts\") pod \"ovn-northd-0\" (UID: \"71e79cb5-28f4-4102-892e-479502ff4db9\") " pod="openstack/ovn-northd-0" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.755516 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/71e79cb5-28f4-4102-892e-479502ff4db9-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"71e79cb5-28f4-4102-892e-479502ff4db9\") " pod="openstack/ovn-northd-0" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.756779 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/71e79cb5-28f4-4102-892e-479502ff4db9-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"71e79cb5-28f4-4102-892e-479502ff4db9\") " pod="openstack/ovn-northd-0" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.756790 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71e79cb5-28f4-4102-892e-479502ff4db9-config\") pod \"ovn-northd-0\" (UID: \"71e79cb5-28f4-4102-892e-479502ff4db9\") " pod="openstack/ovn-northd-0" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.757487 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" 
(UniqueName: \"kubernetes.io/configmap/71e79cb5-28f4-4102-892e-479502ff4db9-scripts\") pod \"ovn-northd-0\" (UID: \"71e79cb5-28f4-4102-892e-479502ff4db9\") " pod="openstack/ovn-northd-0" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.759095 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/71e79cb5-28f4-4102-892e-479502ff4db9-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"71e79cb5-28f4-4102-892e-479502ff4db9\") " pod="openstack/ovn-northd-0" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.759523 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/71e79cb5-28f4-4102-892e-479502ff4db9-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"71e79cb5-28f4-4102-892e-479502ff4db9\") " pod="openstack/ovn-northd-0" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.762650 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e79cb5-28f4-4102-892e-479502ff4db9-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"71e79cb5-28f4-4102-892e-479502ff4db9\") " pod="openstack/ovn-northd-0" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.773543 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dctr4\" (UniqueName: \"kubernetes.io/projected/71e79cb5-28f4-4102-892e-479502ff4db9-kube-api-access-dctr4\") pod \"ovn-northd-0\" (UID: \"71e79cb5-28f4-4102-892e-479502ff4db9\") " pod="openstack/ovn-northd-0" Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.840446 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-mdfwg"] Oct 03 08:55:16 crc kubenswrapper[4899]: I1003 08:55:16.870098 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Oct 03 08:55:17 crc kubenswrapper[4899]: I1003 08:55:17.281630 4899 generic.go:334] "Generic (PLEG): container finished" podID="c605056d-832f-415a-abb2-a62b9f72a17f" containerID="ae93f4c557118798d42cee176ba684bfe134e7562b448c79f98a723ddf6c694d" exitCode=0 Oct 03 08:55:17 crc kubenswrapper[4899]: I1003 08:55:17.281691 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-jgjl2" event={"ID":"c605056d-832f-415a-abb2-a62b9f72a17f","Type":"ContainerDied","Data":"ae93f4c557118798d42cee176ba684bfe134e7562b448c79f98a723ddf6c694d"} Oct 03 08:55:17 crc kubenswrapper[4899]: I1003 08:55:17.285374 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-w992v" event={"ID":"78c399de-31f5-439f-8f0b-24c8dba1875e","Type":"ContainerStarted","Data":"76b38a8731430b5b1c626fe39a206895d04b95115fed707d041b605c7def4b1e"} Oct 03 08:55:17 crc kubenswrapper[4899]: I1003 08:55:17.285432 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-w992v" event={"ID":"78c399de-31f5-439f-8f0b-24c8dba1875e","Type":"ContainerStarted","Data":"dcb51309395a55520ef904f7ceb0ca65bc7001fdc6cb2638693173bb46f2bcb5"} Oct 03 08:55:17 crc kubenswrapper[4899]: I1003 08:55:17.296368 4899 generic.go:334] "Generic (PLEG): container finished" podID="b632f7f4-455b-485d-87d2-6c9d7d5ea289" containerID="990b6660e2c7d86028bbc2cd18681fe7da6b1a38e8f55b1b3fdfaec21624a9eb" exitCode=0 Oct 03 08:55:17 crc kubenswrapper[4899]: I1003 08:55:17.296490 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-2tb7q" event={"ID":"b632f7f4-455b-485d-87d2-6c9d7d5ea289","Type":"ContainerDied","Data":"990b6660e2c7d86028bbc2cd18681fe7da6b1a38e8f55b1b3fdfaec21624a9eb"} Oct 03 08:55:17 crc kubenswrapper[4899]: I1003 08:55:17.296519 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-2tb7q" event={"ID":"b632f7f4-455b-485d-87d2-6c9d7d5ea289","Type":"ContainerStarted","Data":"c576eaf2c2d5bd719d63871bbd929ab690ab89f3cab2ba328cece824045c8c62"} Oct 03 08:55:17 crc kubenswrapper[4899]: I1003 08:55:17.300579 4899 generic.go:334] "Generic (PLEG): container finished" podID="fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12" containerID="5add2446bebd235e2fe87e50feb8b19092da2aa78b5af6b2de9ead86af59a860" exitCode=0 Oct 03 08:55:17 crc kubenswrapper[4899]: I1003 08:55:17.302175 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-mdfwg" event={"ID":"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12","Type":"ContainerDied","Data":"5add2446bebd235e2fe87e50feb8b19092da2aa78b5af6b2de9ead86af59a860"} Oct 03 08:55:17 crc kubenswrapper[4899]: I1003 08:55:17.302217 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-mdfwg" event={"ID":"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12","Type":"ContainerStarted","Data":"f874c456f94d8b6dc6adb173df0388b922927aad780bc7297f689d48f1dcf9e0"} Oct 03 08:55:17 crc kubenswrapper[4899]: I1003 08:55:17.358182 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-w992v" podStartSLOduration=2.358157586 podStartE2EDuration="2.358157586s" podCreationTimestamp="2025-10-03 08:55:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:55:17.326086085 +0000 UTC m=+891.433571038" watchObservedRunningTime="2025-10-03 
08:55:17.358157586 +0000 UTC m=+891.465642539" Oct 03 08:55:17 crc kubenswrapper[4899]: I1003 08:55:17.383435 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 03 08:55:17 crc kubenswrapper[4899]: I1003 08:55:17.589805 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-jgjl2" Oct 03 08:55:17 crc kubenswrapper[4899]: I1003 08:55:17.671322 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f586d\" (UniqueName: \"kubernetes.io/projected/c605056d-832f-415a-abb2-a62b9f72a17f-kube-api-access-f586d\") pod \"c605056d-832f-415a-abb2-a62b9f72a17f\" (UID: \"c605056d-832f-415a-abb2-a62b9f72a17f\") " Oct 03 08:55:17 crc kubenswrapper[4899]: I1003 08:55:17.671461 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c605056d-832f-415a-abb2-a62b9f72a17f-config\") pod \"c605056d-832f-415a-abb2-a62b9f72a17f\" (UID: \"c605056d-832f-415a-abb2-a62b9f72a17f\") " Oct 03 08:55:17 crc kubenswrapper[4899]: I1003 08:55:17.671558 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c605056d-832f-415a-abb2-a62b9f72a17f-dns-svc\") pod \"c605056d-832f-415a-abb2-a62b9f72a17f\" (UID: \"c605056d-832f-415a-abb2-a62b9f72a17f\") " Oct 03 08:55:17 crc kubenswrapper[4899]: I1003 08:55:17.676023 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c605056d-832f-415a-abb2-a62b9f72a17f-kube-api-access-f586d" (OuterVolumeSpecName: "kube-api-access-f586d") pod "c605056d-832f-415a-abb2-a62b9f72a17f" (UID: "c605056d-832f-415a-abb2-a62b9f72a17f"). InnerVolumeSpecName "kube-api-access-f586d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:55:17 crc kubenswrapper[4899]: I1003 08:55:17.689950 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c605056d-832f-415a-abb2-a62b9f72a17f-config" (OuterVolumeSpecName: "config") pod "c605056d-832f-415a-abb2-a62b9f72a17f" (UID: "c605056d-832f-415a-abb2-a62b9f72a17f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:55:17 crc kubenswrapper[4899]: I1003 08:55:17.692609 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c605056d-832f-415a-abb2-a62b9f72a17f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c605056d-832f-415a-abb2-a62b9f72a17f" (UID: "c605056d-832f-415a-abb2-a62b9f72a17f"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:55:17 crc kubenswrapper[4899]: I1003 08:55:17.773346 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f586d\" (UniqueName: \"kubernetes.io/projected/c605056d-832f-415a-abb2-a62b9f72a17f-kube-api-access-f586d\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:17 crc kubenswrapper[4899]: I1003 08:55:17.773396 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c605056d-832f-415a-abb2-a62b9f72a17f-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:17 crc kubenswrapper[4899]: I1003 08:55:17.773408 4899 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c605056d-832f-415a-abb2-a62b9f72a17f-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:17 crc kubenswrapper[4899]: I1003 08:55:17.790667 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Oct 03 08:55:17 crc kubenswrapper[4899]: I1003 08:55:17.790723 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Oct 03 08:55:17 crc kubenswrapper[4899]: I1003 08:55:17.824177 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Oct 03 08:55:17 crc kubenswrapper[4899]: I1003 08:55:17.824240 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Oct 03 08:55:18 crc kubenswrapper[4899]: I1003 08:55:18.310933 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-2tb7q" event={"ID":"b632f7f4-455b-485d-87d2-6c9d7d5ea289","Type":"ContainerStarted","Data":"c4c474dbabf886e04e78a28aaabf59350acc53a1b892eff8a70db24ba4add230"} Oct 03 08:55:18 crc kubenswrapper[4899]: I1003 08:55:18.311003 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bc7876d45-2tb7q" Oct 03 08:55:18 crc kubenswrapper[4899]: I1003 08:55:18.316021 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-mdfwg" event={"ID":"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12","Type":"ContainerStarted","Data":"8ff74515d7f328a8d302764184e3dd7fd83e1a7de713fddb60c74ee39c059b25"} Oct 03 08:55:18 crc kubenswrapper[4899]: I1003 08:55:18.316092 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8554648995-mdfwg" Oct 03 08:55:18 crc kubenswrapper[4899]: I1003 08:55:18.319155 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-jgjl2" Oct 03 08:55:18 crc kubenswrapper[4899]: I1003 08:55:18.319322 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-jgjl2" event={"ID":"c605056d-832f-415a-abb2-a62b9f72a17f","Type":"ContainerDied","Data":"ba0f60def4b14125697731aeace9c1232e84ba9f131e282aad5d7ac701f8fa79"} Oct 03 08:55:18 crc kubenswrapper[4899]: I1003 08:55:18.319402 4899 scope.go:117] "RemoveContainer" containerID="ae93f4c557118798d42cee176ba684bfe134e7562b448c79f98a723ddf6c694d" Oct 03 08:55:18 crc kubenswrapper[4899]: I1003 08:55:18.322244 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"71e79cb5-28f4-4102-892e-479502ff4db9","Type":"ContainerStarted","Data":"0276312d8ebe68ca19f02d27d461e0cbf65e935ef09d61e63de064a573af61be"} Oct 03 08:55:18 crc kubenswrapper[4899]: I1003 08:55:18.335765 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bc7876d45-2tb7q" podStartSLOduration=3.335745616 podStartE2EDuration="3.335745616s" podCreationTimestamp="2025-10-03 08:55:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:55:18.328207898 +0000 UTC m=+892.435692881" watchObservedRunningTime="2025-10-03 08:55:18.335745616 +0000 UTC m=+892.443230569" Oct 03 08:55:18 crc kubenswrapper[4899]: I1003 08:55:18.353715 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8554648995-mdfwg" podStartSLOduration=3.353694181 podStartE2EDuration="3.353694181s" podCreationTimestamp="2025-10-03 08:55:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:55:18.346704801 +0000 UTC m=+892.454189764" watchObservedRunningTime="2025-10-03 08:55:18.353694181 +0000 UTC m=+892.461179134" Oct 03 08:55:18 crc kubenswrapper[4899]: I1003 08:55:18.390728 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Oct 03 08:55:18 crc kubenswrapper[4899]: I1003 08:55:18.392422 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-jgjl2"] Oct 03 08:55:18 crc kubenswrapper[4899]: I1003 08:55:18.413144 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-jgjl2"] Oct 03 08:55:18 crc kubenswrapper[4899]: I1003 08:55:18.538635 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c605056d-832f-415a-abb2-a62b9f72a17f" path="/var/lib/kubelet/pods/c605056d-832f-415a-abb2-a62b9f72a17f/volumes" Oct 03 08:55:19 crc kubenswrapper[4899]: I1003 08:55:19.332600 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"71e79cb5-28f4-4102-892e-479502ff4db9","Type":"ContainerStarted","Data":"df26375b091bc49a9400fdfeffc71c6873c447a6cc08684efb4a9dafdd35af70"} Oct 03 08:55:19 crc kubenswrapper[4899]: I1003 08:55:19.332654 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"71e79cb5-28f4-4102-892e-479502ff4db9","Type":"ContainerStarted","Data":"47673e07f1374b47133d87bd8ed3fc283e243f163f38b944b461e2837cc6682b"} Oct 03 08:55:19 crc kubenswrapper[4899]: I1003 08:55:19.332969 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Oct 03 08:55:19 crc kubenswrapper[4899]: I1003 
08:55:19.353681 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.277683336 podStartE2EDuration="3.353664836s" podCreationTimestamp="2025-10-03 08:55:16 +0000 UTC" firstStartedPulling="2025-10-03 08:55:17.433721545 +0000 UTC m=+891.541206508" lastFinishedPulling="2025-10-03 08:55:18.509703055 +0000 UTC m=+892.617188008" observedRunningTime="2025-10-03 08:55:19.349307768 +0000 UTC m=+893.456792711" watchObservedRunningTime="2025-10-03 08:55:19.353664836 +0000 UTC m=+893.461149789" Oct 03 08:55:19 crc kubenswrapper[4899]: I1003 08:55:19.845739 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Oct 03 08:55:19 crc kubenswrapper[4899]: I1003 08:55:19.896387 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.272455 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-2tb7q"] Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.299941 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-nj5rb"] Oct 03 08:55:20 crc kubenswrapper[4899]: E1003 08:55:20.301634 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c605056d-832f-415a-abb2-a62b9f72a17f" containerName="init" Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.301801 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="c605056d-832f-415a-abb2-a62b9f72a17f" containerName="init" Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.302100 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="c605056d-832f-415a-abb2-a62b9f72a17f" containerName="init" Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.303275 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.319646 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-nj5rb"] Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.341042 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6bc7876d45-2tb7q" podUID="b632f7f4-455b-485d-87d2-6c9d7d5ea289" containerName="dnsmasq-dns" containerID="cri-o://c4c474dbabf886e04e78a28aaabf59350acc53a1b892eff8a70db24ba4add230" gracePeriod=10 Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.353240 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.515326 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbbacbbc-f946-45cc-abdb-0389500e5c19-config\") pod \"dnsmasq-dns-b8fbc5445-nj5rb\" (UID: \"dbbacbbc-f946-45cc-abdb-0389500e5c19\") " pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.515378 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ckt5\" (UniqueName: \"kubernetes.io/projected/dbbacbbc-f946-45cc-abdb-0389500e5c19-kube-api-access-2ckt5\") pod \"dnsmasq-dns-b8fbc5445-nj5rb\" (UID: \"dbbacbbc-f946-45cc-abdb-0389500e5c19\") " pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.515516 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dbbacbbc-f946-45cc-abdb-0389500e5c19-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-nj5rb\" (UID: \"dbbacbbc-f946-45cc-abdb-0389500e5c19\") " pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.515551 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dbbacbbc-f946-45cc-abdb-0389500e5c19-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-nj5rb\" (UID: \"dbbacbbc-f946-45cc-abdb-0389500e5c19\") " pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.515640 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dbbacbbc-f946-45cc-abdb-0389500e5c19-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-nj5rb\" (UID: \"dbbacbbc-f946-45cc-abdb-0389500e5c19\") " pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.618629 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbbacbbc-f946-45cc-abdb-0389500e5c19-config\") pod \"dnsmasq-dns-b8fbc5445-nj5rb\" (UID: \"dbbacbbc-f946-45cc-abdb-0389500e5c19\") " pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.617356 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbbacbbc-f946-45cc-abdb-0389500e5c19-config\") pod \"dnsmasq-dns-b8fbc5445-nj5rb\" (UID: \"dbbacbbc-f946-45cc-abdb-0389500e5c19\") " pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" Oct 03 08:55:20 crc kubenswrapper[4899]: 
I1003 08:55:20.619038 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ckt5\" (UniqueName: \"kubernetes.io/projected/dbbacbbc-f946-45cc-abdb-0389500e5c19-kube-api-access-2ckt5\") pod \"dnsmasq-dns-b8fbc5445-nj5rb\" (UID: \"dbbacbbc-f946-45cc-abdb-0389500e5c19\") " pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.619577 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dbbacbbc-f946-45cc-abdb-0389500e5c19-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-nj5rb\" (UID: \"dbbacbbc-f946-45cc-abdb-0389500e5c19\") " pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.619607 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dbbacbbc-f946-45cc-abdb-0389500e5c19-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-nj5rb\" (UID: \"dbbacbbc-f946-45cc-abdb-0389500e5c19\") " pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.619698 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dbbacbbc-f946-45cc-abdb-0389500e5c19-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-nj5rb\" (UID: \"dbbacbbc-f946-45cc-abdb-0389500e5c19\") " pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.620564 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dbbacbbc-f946-45cc-abdb-0389500e5c19-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-nj5rb\" (UID: \"dbbacbbc-f946-45cc-abdb-0389500e5c19\") " pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.621246 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dbbacbbc-f946-45cc-abdb-0389500e5c19-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-nj5rb\" (UID: \"dbbacbbc-f946-45cc-abdb-0389500e5c19\") " pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.621866 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dbbacbbc-f946-45cc-abdb-0389500e5c19-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-nj5rb\" (UID: \"dbbacbbc-f946-45cc-abdb-0389500e5c19\") " pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.646593 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ckt5\" (UniqueName: \"kubernetes.io/projected/dbbacbbc-f946-45cc-abdb-0389500e5c19-kube-api-access-2ckt5\") pod \"dnsmasq-dns-b8fbc5445-nj5rb\" (UID: \"dbbacbbc-f946-45cc-abdb-0389500e5c19\") " pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.778576 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-2tb7q" Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.926185 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b632f7f4-455b-485d-87d2-6c9d7d5ea289-ovsdbserver-sb\") pod \"b632f7f4-455b-485d-87d2-6c9d7d5ea289\" (UID: \"b632f7f4-455b-485d-87d2-6c9d7d5ea289\") " Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.926267 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gx6rk\" (UniqueName: \"kubernetes.io/projected/b632f7f4-455b-485d-87d2-6c9d7d5ea289-kube-api-access-gx6rk\") pod \"b632f7f4-455b-485d-87d2-6c9d7d5ea289\" (UID: \"b632f7f4-455b-485d-87d2-6c9d7d5ea289\") " Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.926304 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b632f7f4-455b-485d-87d2-6c9d7d5ea289-dns-svc\") pod \"b632f7f4-455b-485d-87d2-6c9d7d5ea289\" (UID: \"b632f7f4-455b-485d-87d2-6c9d7d5ea289\") " Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.926426 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b632f7f4-455b-485d-87d2-6c9d7d5ea289-config\") pod \"b632f7f4-455b-485d-87d2-6c9d7d5ea289\" (UID: \"b632f7f4-455b-485d-87d2-6c9d7d5ea289\") " Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.930314 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.930347 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b632f7f4-455b-485d-87d2-6c9d7d5ea289-kube-api-access-gx6rk" (OuterVolumeSpecName: "kube-api-access-gx6rk") pod "b632f7f4-455b-485d-87d2-6c9d7d5ea289" (UID: "b632f7f4-455b-485d-87d2-6c9d7d5ea289"). InnerVolumeSpecName "kube-api-access-gx6rk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.966028 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b632f7f4-455b-485d-87d2-6c9d7d5ea289-config" (OuterVolumeSpecName: "config") pod "b632f7f4-455b-485d-87d2-6c9d7d5ea289" (UID: "b632f7f4-455b-485d-87d2-6c9d7d5ea289"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.966745 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b632f7f4-455b-485d-87d2-6c9d7d5ea289-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b632f7f4-455b-485d-87d2-6c9d7d5ea289" (UID: "b632f7f4-455b-485d-87d2-6c9d7d5ea289"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:55:20 crc kubenswrapper[4899]: I1003 08:55:20.977501 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b632f7f4-455b-485d-87d2-6c9d7d5ea289-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b632f7f4-455b-485d-87d2-6c9d7d5ea289" (UID: "b632f7f4-455b-485d-87d2-6c9d7d5ea289"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.027995 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b632f7f4-455b-485d-87d2-6c9d7d5ea289-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.028026 4899 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b632f7f4-455b-485d-87d2-6c9d7d5ea289-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.028038 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gx6rk\" (UniqueName: \"kubernetes.io/projected/b632f7f4-455b-485d-87d2-6c9d7d5ea289-kube-api-access-gx6rk\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.028046 4899 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b632f7f4-455b-485d-87d2-6c9d7d5ea289-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.339410 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-nj5rb"] Oct 03 08:55:21 crc kubenswrapper[4899]: W1003 08:55:21.340629 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddbbacbbc_f946_45cc_abdb_0389500e5c19.slice/crio-e357b5cc4b256c5db51aeb80f47d686e0a3fca5462cadeef72438054a958a944 WatchSource:0}: Error finding container e357b5cc4b256c5db51aeb80f47d686e0a3fca5462cadeef72438054a958a944: Status 404 returned error can't find the container with id e357b5cc4b256c5db51aeb80f47d686e0a3fca5462cadeef72438054a958a944 Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.349609 4899 generic.go:334] "Generic (PLEG): container finished" podID="b632f7f4-455b-485d-87d2-6c9d7d5ea289" containerID="c4c474dbabf886e04e78a28aaabf59350acc53a1b892eff8a70db24ba4add230" exitCode=0 Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.349668 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-2tb7q" event={"ID":"b632f7f4-455b-485d-87d2-6c9d7d5ea289","Type":"ContainerDied","Data":"c4c474dbabf886e04e78a28aaabf59350acc53a1b892eff8a70db24ba4add230"} Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.349686 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-2tb7q" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.349716 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-2tb7q" event={"ID":"b632f7f4-455b-485d-87d2-6c9d7d5ea289","Type":"ContainerDied","Data":"c576eaf2c2d5bd719d63871bbd929ab690ab89f3cab2ba328cece824045c8c62"} Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.349737 4899 scope.go:117] "RemoveContainer" containerID="c4c474dbabf886e04e78a28aaabf59350acc53a1b892eff8a70db24ba4add230" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.372035 4899 scope.go:117] "RemoveContainer" containerID="990b6660e2c7d86028bbc2cd18681fe7da6b1a38e8f55b1b3fdfaec21624a9eb" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.399341 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-2tb7q"] Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.402963 4899 scope.go:117] "RemoveContainer" containerID="c4c474dbabf886e04e78a28aaabf59350acc53a1b892eff8a70db24ba4add230" Oct 03 08:55:21 crc kubenswrapper[4899]: E1003 08:55:21.403495 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c4c474dbabf886e04e78a28aaabf59350acc53a1b892eff8a70db24ba4add230\": container with ID starting with c4c474dbabf886e04e78a28aaabf59350acc53a1b892eff8a70db24ba4add230 not found: ID does not exist" containerID="c4c474dbabf886e04e78a28aaabf59350acc53a1b892eff8a70db24ba4add230" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.403525 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4c474dbabf886e04e78a28aaabf59350acc53a1b892eff8a70db24ba4add230"} err="failed to get container status \"c4c474dbabf886e04e78a28aaabf59350acc53a1b892eff8a70db24ba4add230\": rpc error: code = NotFound desc = could not find container \"c4c474dbabf886e04e78a28aaabf59350acc53a1b892eff8a70db24ba4add230\": container with ID starting with c4c474dbabf886e04e78a28aaabf59350acc53a1b892eff8a70db24ba4add230 not found: ID does not exist" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.403545 4899 scope.go:117] "RemoveContainer" containerID="990b6660e2c7d86028bbc2cd18681fe7da6b1a38e8f55b1b3fdfaec21624a9eb" Oct 03 08:55:21 crc kubenswrapper[4899]: E1003 08:55:21.403985 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"990b6660e2c7d86028bbc2cd18681fe7da6b1a38e8f55b1b3fdfaec21624a9eb\": container with ID starting with 990b6660e2c7d86028bbc2cd18681fe7da6b1a38e8f55b1b3fdfaec21624a9eb not found: ID does not exist" containerID="990b6660e2c7d86028bbc2cd18681fe7da6b1a38e8f55b1b3fdfaec21624a9eb" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.404041 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"990b6660e2c7d86028bbc2cd18681fe7da6b1a38e8f55b1b3fdfaec21624a9eb"} err="failed to get container status \"990b6660e2c7d86028bbc2cd18681fe7da6b1a38e8f55b1b3fdfaec21624a9eb\": rpc error: code = NotFound desc = could not find container \"990b6660e2c7d86028bbc2cd18681fe7da6b1a38e8f55b1b3fdfaec21624a9eb\": container with ID starting with 990b6660e2c7d86028bbc2cd18681fe7da6b1a38e8f55b1b3fdfaec21624a9eb not found: ID does not exist" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.406685 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-2tb7q"] Oct 03 
08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.438274 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Oct 03 08:55:21 crc kubenswrapper[4899]: E1003 08:55:21.438836 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b632f7f4-455b-485d-87d2-6c9d7d5ea289" containerName="dnsmasq-dns" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.438935 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="b632f7f4-455b-485d-87d2-6c9d7d5ea289" containerName="dnsmasq-dns" Oct 03 08:55:21 crc kubenswrapper[4899]: E1003 08:55:21.439024 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b632f7f4-455b-485d-87d2-6c9d7d5ea289" containerName="init" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.439085 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="b632f7f4-455b-485d-87d2-6c9d7d5ea289" containerName="init" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.439327 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="b632f7f4-455b-485d-87d2-6c9d7d5ea289" containerName="dnsmasq-dns" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.443989 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.449352 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.449525 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.449652 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-4rzw8" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.449763 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.465173 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.640135 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"a9725bfb-2b4a-49d7-b4d8-c2235583f28f\") " pod="openstack/swift-storage-0" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.640488 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-cache\") pod \"swift-storage-0\" (UID: \"a9725bfb-2b4a-49d7-b4d8-c2235583f28f\") " pod="openstack/swift-storage-0" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.640515 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8h98p\" (UniqueName: \"kubernetes.io/projected/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-kube-api-access-8h98p\") pod \"swift-storage-0\" (UID: \"a9725bfb-2b4a-49d7-b4d8-c2235583f28f\") " pod="openstack/swift-storage-0" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.640568 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-etc-swift\") pod \"swift-storage-0\" (UID: \"a9725bfb-2b4a-49d7-b4d8-c2235583f28f\") " 
pod="openstack/swift-storage-0" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.640671 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-lock\") pod \"swift-storage-0\" (UID: \"a9725bfb-2b4a-49d7-b4d8-c2235583f28f\") " pod="openstack/swift-storage-0" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.742204 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-lock\") pod \"swift-storage-0\" (UID: \"a9725bfb-2b4a-49d7-b4d8-c2235583f28f\") " pod="openstack/swift-storage-0" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.742726 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-lock\") pod \"swift-storage-0\" (UID: \"a9725bfb-2b4a-49d7-b4d8-c2235583f28f\") " pod="openstack/swift-storage-0" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.743222 4899 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"a9725bfb-2b4a-49d7-b4d8-c2235583f28f\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/swift-storage-0" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.743458 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"a9725bfb-2b4a-49d7-b4d8-c2235583f28f\") " pod="openstack/swift-storage-0" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.743518 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-cache\") pod \"swift-storage-0\" (UID: \"a9725bfb-2b4a-49d7-b4d8-c2235583f28f\") " pod="openstack/swift-storage-0" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.743536 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8h98p\" (UniqueName: \"kubernetes.io/projected/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-kube-api-access-8h98p\") pod \"swift-storage-0\" (UID: \"a9725bfb-2b4a-49d7-b4d8-c2235583f28f\") " pod="openstack/swift-storage-0" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.743565 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-etc-swift\") pod \"swift-storage-0\" (UID: \"a9725bfb-2b4a-49d7-b4d8-c2235583f28f\") " pod="openstack/swift-storage-0" Oct 03 08:55:21 crc kubenswrapper[4899]: E1003 08:55:21.743683 4899 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 03 08:55:21 crc kubenswrapper[4899]: E1003 08:55:21.743696 4899 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 03 08:55:21 crc kubenswrapper[4899]: E1003 08:55:21.743741 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-etc-swift podName:a9725bfb-2b4a-49d7-b4d8-c2235583f28f nodeName:}" failed. 
No retries permitted until 2025-10-03 08:55:22.243725142 +0000 UTC m=+896.351210095 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-etc-swift") pod "swift-storage-0" (UID: "a9725bfb-2b4a-49d7-b4d8-c2235583f28f") : configmap "swift-ring-files" not found Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.743919 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-cache\") pod \"swift-storage-0\" (UID: \"a9725bfb-2b4a-49d7-b4d8-c2235583f28f\") " pod="openstack/swift-storage-0" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.764289 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8h98p\" (UniqueName: \"kubernetes.io/projected/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-kube-api-access-8h98p\") pod \"swift-storage-0\" (UID: \"a9725bfb-2b4a-49d7-b4d8-c2235583f28f\") " pod="openstack/swift-storage-0" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.764850 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"a9725bfb-2b4a-49d7-b4d8-c2235583f28f\") " pod="openstack/swift-storage-0" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.886878 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Oct 03 08:55:21 crc kubenswrapper[4899]: I1003 08:55:21.934796 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Oct 03 08:55:22 crc kubenswrapper[4899]: I1003 08:55:22.253004 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-etc-swift\") pod \"swift-storage-0\" (UID: \"a9725bfb-2b4a-49d7-b4d8-c2235583f28f\") " pod="openstack/swift-storage-0" Oct 03 08:55:22 crc kubenswrapper[4899]: E1003 08:55:22.253197 4899 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 03 08:55:22 crc kubenswrapper[4899]: E1003 08:55:22.253390 4899 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 03 08:55:22 crc kubenswrapper[4899]: E1003 08:55:22.253496 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-etc-swift podName:a9725bfb-2b4a-49d7-b4d8-c2235583f28f nodeName:}" failed. No retries permitted until 2025-10-03 08:55:23.253479027 +0000 UTC m=+897.360963980 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-etc-swift") pod "swift-storage-0" (UID: "a9725bfb-2b4a-49d7-b4d8-c2235583f28f") : configmap "swift-ring-files" not found Oct 03 08:55:22 crc kubenswrapper[4899]: I1003 08:55:22.358106 4899 generic.go:334] "Generic (PLEG): container finished" podID="dbbacbbc-f946-45cc-abdb-0389500e5c19" containerID="fb3e84e8ee193f58786e8f6657b3fdc52745c71de5fd1fd1ba04f87c9945f199" exitCode=0 Oct 03 08:55:22 crc kubenswrapper[4899]: I1003 08:55:22.358167 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" event={"ID":"dbbacbbc-f946-45cc-abdb-0389500e5c19","Type":"ContainerDied","Data":"fb3e84e8ee193f58786e8f6657b3fdc52745c71de5fd1fd1ba04f87c9945f199"} Oct 03 08:55:22 crc kubenswrapper[4899]: I1003 08:55:22.358230 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" event={"ID":"dbbacbbc-f946-45cc-abdb-0389500e5c19","Type":"ContainerStarted","Data":"e357b5cc4b256c5db51aeb80f47d686e0a3fca5462cadeef72438054a958a944"} Oct 03 08:55:22 crc kubenswrapper[4899]: I1003 08:55:22.538488 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b632f7f4-455b-485d-87d2-6c9d7d5ea289" path="/var/lib/kubelet/pods/b632f7f4-455b-485d-87d2-6c9d7d5ea289/volumes" Oct 03 08:55:23 crc kubenswrapper[4899]: I1003 08:55:23.270041 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-etc-swift\") pod \"swift-storage-0\" (UID: \"a9725bfb-2b4a-49d7-b4d8-c2235583f28f\") " pod="openstack/swift-storage-0" Oct 03 08:55:23 crc kubenswrapper[4899]: E1003 08:55:23.270236 4899 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 03 08:55:23 crc kubenswrapper[4899]: E1003 08:55:23.270271 4899 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 03 08:55:23 crc kubenswrapper[4899]: E1003 08:55:23.270341 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-etc-swift podName:a9725bfb-2b4a-49d7-b4d8-c2235583f28f nodeName:}" failed. No retries permitted until 2025-10-03 08:55:25.270319263 +0000 UTC m=+899.377804216 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-etc-swift") pod "swift-storage-0" (UID: "a9725bfb-2b4a-49d7-b4d8-c2235583f28f") : configmap "swift-ring-files" not found Oct 03 08:55:23 crc kubenswrapper[4899]: I1003 08:55:23.366835 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" event={"ID":"dbbacbbc-f946-45cc-abdb-0389500e5c19","Type":"ContainerStarted","Data":"2d57925db20a8b04d78c247124c30c43b01a5af3c3cb897c7a01aa2ea9616655"} Oct 03 08:55:23 crc kubenswrapper[4899]: I1003 08:55:23.367025 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.306126 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-etc-swift\") pod \"swift-storage-0\" (UID: \"a9725bfb-2b4a-49d7-b4d8-c2235583f28f\") " pod="openstack/swift-storage-0" Oct 03 08:55:25 crc kubenswrapper[4899]: E1003 08:55:25.306318 4899 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 03 08:55:25 crc kubenswrapper[4899]: E1003 08:55:25.306453 4899 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 03 08:55:25 crc kubenswrapper[4899]: E1003 08:55:25.306521 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-etc-swift podName:a9725bfb-2b4a-49d7-b4d8-c2235583f28f nodeName:}" failed. No retries permitted until 2025-10-03 08:55:29.306488374 +0000 UTC m=+903.413973327 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-etc-swift") pod "swift-storage-0" (UID: "a9725bfb-2b4a-49d7-b4d8-c2235583f28f") : configmap "swift-ring-files" not found Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.384020 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" podStartSLOduration=5.384000365 podStartE2EDuration="5.384000365s" podCreationTimestamp="2025-10-03 08:55:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:55:23.384584183 +0000 UTC m=+897.492069136" watchObservedRunningTime="2025-10-03 08:55:25.384000365 +0000 UTC m=+899.491485318" Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.386024 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-s5xff"] Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.387267 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-s5xff" Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.389526 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.389595 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.389634 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.397635 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-s5xff"] Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.508667 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-combined-ca-bundle\") pod \"swift-ring-rebalance-s5xff\" (UID: \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\") " pod="openstack/swift-ring-rebalance-s5xff" Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.508738 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-swiftconf\") pod \"swift-ring-rebalance-s5xff\" (UID: \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\") " pod="openstack/swift-ring-rebalance-s5xff" Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.508790 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-scripts\") pod \"swift-ring-rebalance-s5xff\" (UID: \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\") " pod="openstack/swift-ring-rebalance-s5xff" Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.509000 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-etc-swift\") pod \"swift-ring-rebalance-s5xff\" (UID: \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\") " pod="openstack/swift-ring-rebalance-s5xff" Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.509135 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mbmvc\" (UniqueName: \"kubernetes.io/projected/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-kube-api-access-mbmvc\") pod \"swift-ring-rebalance-s5xff\" (UID: \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\") " pod="openstack/swift-ring-rebalance-s5xff" Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.509181 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-dispersionconf\") pod \"swift-ring-rebalance-s5xff\" (UID: \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\") " pod="openstack/swift-ring-rebalance-s5xff" Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.509312 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-ring-data-devices\") pod \"swift-ring-rebalance-s5xff\" (UID: \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\") " pod="openstack/swift-ring-rebalance-s5xff" Oct 03 
08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.610838 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-combined-ca-bundle\") pod \"swift-ring-rebalance-s5xff\" (UID: \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\") " pod="openstack/swift-ring-rebalance-s5xff" Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.611004 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-swiftconf\") pod \"swift-ring-rebalance-s5xff\" (UID: \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\") " pod="openstack/swift-ring-rebalance-s5xff" Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.611059 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-scripts\") pod \"swift-ring-rebalance-s5xff\" (UID: \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\") " pod="openstack/swift-ring-rebalance-s5xff" Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.611122 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-etc-swift\") pod \"swift-ring-rebalance-s5xff\" (UID: \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\") " pod="openstack/swift-ring-rebalance-s5xff" Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.611169 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mbmvc\" (UniqueName: \"kubernetes.io/projected/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-kube-api-access-mbmvc\") pod \"swift-ring-rebalance-s5xff\" (UID: \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\") " pod="openstack/swift-ring-rebalance-s5xff" Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.611206 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-dispersionconf\") pod \"swift-ring-rebalance-s5xff\" (UID: \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\") " pod="openstack/swift-ring-rebalance-s5xff" Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.611252 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-ring-data-devices\") pod \"swift-ring-rebalance-s5xff\" (UID: \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\") " pod="openstack/swift-ring-rebalance-s5xff" Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.611606 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-etc-swift\") pod \"swift-ring-rebalance-s5xff\" (UID: \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\") " pod="openstack/swift-ring-rebalance-s5xff" Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.611702 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-scripts\") pod \"swift-ring-rebalance-s5xff\" (UID: \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\") " pod="openstack/swift-ring-rebalance-s5xff" Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.612093 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-ring-data-devices\") pod \"swift-ring-rebalance-s5xff\" (UID: \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\") " pod="openstack/swift-ring-rebalance-s5xff" Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.615807 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-swiftconf\") pod \"swift-ring-rebalance-s5xff\" (UID: \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\") " pod="openstack/swift-ring-rebalance-s5xff" Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.620343 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-dispersionconf\") pod \"swift-ring-rebalance-s5xff\" (UID: \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\") " pod="openstack/swift-ring-rebalance-s5xff" Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.620557 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-combined-ca-bundle\") pod \"swift-ring-rebalance-s5xff\" (UID: \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\") " pod="openstack/swift-ring-rebalance-s5xff" Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.639039 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mbmvc\" (UniqueName: \"kubernetes.io/projected/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-kube-api-access-mbmvc\") pod \"swift-ring-rebalance-s5xff\" (UID: \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\") " pod="openstack/swift-ring-rebalance-s5xff" Oct 03 08:55:25 crc kubenswrapper[4899]: I1003 08:55:25.703835 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-s5xff" Oct 03 08:55:26 crc kubenswrapper[4899]: I1003 08:55:26.106039 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-s5xff"] Oct 03 08:55:26 crc kubenswrapper[4899]: W1003 08:55:26.108515 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod79a7b2d1_b9f8_4637_b1b2_1bfc4b5739ca.slice/crio-7bc857ce8487d879bc42f7d7bdc058b0e3a8e3418bf3d68d16a19e3a3c540082 WatchSource:0}: Error finding container 7bc857ce8487d879bc42f7d7bdc058b0e3a8e3418bf3d68d16a19e3a3c540082: Status 404 returned error can't find the container with id 7bc857ce8487d879bc42f7d7bdc058b0e3a8e3418bf3d68d16a19e3a3c540082 Oct 03 08:55:26 crc kubenswrapper[4899]: I1003 08:55:26.339003 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8554648995-mdfwg" Oct 03 08:55:26 crc kubenswrapper[4899]: I1003 08:55:26.389672 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-s5xff" event={"ID":"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca","Type":"ContainerStarted","Data":"7bc857ce8487d879bc42f7d7bdc058b0e3a8e3418bf3d68d16a19e3a3c540082"} Oct 03 08:55:28 crc kubenswrapper[4899]: I1003 08:55:28.078090 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-vhnnx"] Oct 03 08:55:28 crc kubenswrapper[4899]: I1003 08:55:28.080026 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-vhnnx" Oct 03 08:55:28 crc kubenswrapper[4899]: I1003 08:55:28.086303 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-vhnnx"] Oct 03 08:55:28 crc kubenswrapper[4899]: I1003 08:55:28.149913 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6rcc\" (UniqueName: \"kubernetes.io/projected/dcceacb3-5a24-43fc-b90b-036ae2fd715a-kube-api-access-c6rcc\") pod \"keystone-db-create-vhnnx\" (UID: \"dcceacb3-5a24-43fc-b90b-036ae2fd715a\") " pod="openstack/keystone-db-create-vhnnx" Oct 03 08:55:28 crc kubenswrapper[4899]: I1003 08:55:28.251018 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6rcc\" (UniqueName: \"kubernetes.io/projected/dcceacb3-5a24-43fc-b90b-036ae2fd715a-kube-api-access-c6rcc\") pod \"keystone-db-create-vhnnx\" (UID: \"dcceacb3-5a24-43fc-b90b-036ae2fd715a\") " pod="openstack/keystone-db-create-vhnnx" Oct 03 08:55:28 crc kubenswrapper[4899]: I1003 08:55:28.278775 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-kjd74"] Oct 03 08:55:28 crc kubenswrapper[4899]: I1003 08:55:28.279971 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-kjd74" Oct 03 08:55:28 crc kubenswrapper[4899]: I1003 08:55:28.286383 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-kjd74"] Oct 03 08:55:28 crc kubenswrapper[4899]: I1003 08:55:28.288466 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6rcc\" (UniqueName: \"kubernetes.io/projected/dcceacb3-5a24-43fc-b90b-036ae2fd715a-kube-api-access-c6rcc\") pod \"keystone-db-create-vhnnx\" (UID: \"dcceacb3-5a24-43fc-b90b-036ae2fd715a\") " pod="openstack/keystone-db-create-vhnnx" Oct 03 08:55:28 crc kubenswrapper[4899]: I1003 08:55:28.352855 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mcxm5\" (UniqueName: \"kubernetes.io/projected/16d9f858-ae62-4d37-b2de-97fb7d70d6a3-kube-api-access-mcxm5\") pod \"placement-db-create-kjd74\" (UID: \"16d9f858-ae62-4d37-b2de-97fb7d70d6a3\") " pod="openstack/placement-db-create-kjd74" Oct 03 08:55:28 crc kubenswrapper[4899]: I1003 08:55:28.408021 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-vhnnx" Oct 03 08:55:28 crc kubenswrapper[4899]: I1003 08:55:28.454504 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mcxm5\" (UniqueName: \"kubernetes.io/projected/16d9f858-ae62-4d37-b2de-97fb7d70d6a3-kube-api-access-mcxm5\") pod \"placement-db-create-kjd74\" (UID: \"16d9f858-ae62-4d37-b2de-97fb7d70d6a3\") " pod="openstack/placement-db-create-kjd74" Oct 03 08:55:28 crc kubenswrapper[4899]: I1003 08:55:28.471392 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mcxm5\" (UniqueName: \"kubernetes.io/projected/16d9f858-ae62-4d37-b2de-97fb7d70d6a3-kube-api-access-mcxm5\") pod \"placement-db-create-kjd74\" (UID: \"16d9f858-ae62-4d37-b2de-97fb7d70d6a3\") " pod="openstack/placement-db-create-kjd74" Oct 03 08:55:28 crc kubenswrapper[4899]: I1003 08:55:28.544512 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-5l7x5"] Oct 03 08:55:28 crc kubenswrapper[4899]: I1003 08:55:28.546106 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-5l7x5" Oct 03 08:55:28 crc kubenswrapper[4899]: I1003 08:55:28.569981 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-5l7x5"] Oct 03 08:55:28 crc kubenswrapper[4899]: I1003 08:55:28.632609 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-kjd74" Oct 03 08:55:28 crc kubenswrapper[4899]: I1003 08:55:28.659263 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwwjx\" (UniqueName: \"kubernetes.io/projected/381e208b-07bc-42e2-88b1-99ae1ae4f907-kube-api-access-xwwjx\") pod \"glance-db-create-5l7x5\" (UID: \"381e208b-07bc-42e2-88b1-99ae1ae4f907\") " pod="openstack/glance-db-create-5l7x5" Oct 03 08:55:28 crc kubenswrapper[4899]: I1003 08:55:28.761072 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwwjx\" (UniqueName: \"kubernetes.io/projected/381e208b-07bc-42e2-88b1-99ae1ae4f907-kube-api-access-xwwjx\") pod \"glance-db-create-5l7x5\" (UID: \"381e208b-07bc-42e2-88b1-99ae1ae4f907\") " pod="openstack/glance-db-create-5l7x5" Oct 03 08:55:28 crc kubenswrapper[4899]: I1003 08:55:28.778663 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwwjx\" (UniqueName: \"kubernetes.io/projected/381e208b-07bc-42e2-88b1-99ae1ae4f907-kube-api-access-xwwjx\") pod \"glance-db-create-5l7x5\" (UID: \"381e208b-07bc-42e2-88b1-99ae1ae4f907\") " pod="openstack/glance-db-create-5l7x5" Oct 03 08:55:28 crc kubenswrapper[4899]: I1003 08:55:28.831857 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-vhnnx"] Oct 03 08:55:28 crc kubenswrapper[4899]: I1003 08:55:28.872848 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-5l7x5" Oct 03 08:55:29 crc kubenswrapper[4899]: I1003 08:55:29.049952 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-kjd74"] Oct 03 08:55:29 crc kubenswrapper[4899]: I1003 08:55:29.271967 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-5l7x5"] Oct 03 08:55:29 crc kubenswrapper[4899]: I1003 08:55:29.372655 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-etc-swift\") pod \"swift-storage-0\" (UID: \"a9725bfb-2b4a-49d7-b4d8-c2235583f28f\") " pod="openstack/swift-storage-0" Oct 03 08:55:29 crc kubenswrapper[4899]: E1003 08:55:29.372844 4899 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 03 08:55:29 crc kubenswrapper[4899]: E1003 08:55:29.372866 4899 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 03 08:55:29 crc kubenswrapper[4899]: E1003 08:55:29.372940 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-etc-swift podName:a9725bfb-2b4a-49d7-b4d8-c2235583f28f nodeName:}" failed. No retries permitted until 2025-10-03 08:55:37.372921709 +0000 UTC m=+911.480406662 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-etc-swift") pod "swift-storage-0" (UID: "a9725bfb-2b4a-49d7-b4d8-c2235583f28f") : configmap "swift-ring-files" not found Oct 03 08:55:29 crc kubenswrapper[4899]: I1003 08:55:29.412632 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-vhnnx" event={"ID":"dcceacb3-5a24-43fc-b90b-036ae2fd715a","Type":"ContainerStarted","Data":"c58ba241418378278a35e89fd908da3c97aa1a5d5ccaec22c8e36cb46a319c32"} Oct 03 08:55:30 crc kubenswrapper[4899]: I1003 08:55:30.420490 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-5l7x5" event={"ID":"381e208b-07bc-42e2-88b1-99ae1ae4f907","Type":"ContainerStarted","Data":"a99f0a4bd6a6fa47d1b656aff3ce3e9796ee421430a32590fa7348f98f7e9793"} Oct 03 08:55:30 crc kubenswrapper[4899]: I1003 08:55:30.421432 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-kjd74" event={"ID":"16d9f858-ae62-4d37-b2de-97fb7d70d6a3","Type":"ContainerStarted","Data":"b74cbc9cdfbf8a410b2d7586d544c52a0accf4a59fa18f51a0cee595fa9de9f6"} Oct 03 08:55:30 crc kubenswrapper[4899]: I1003 08:55:30.931690 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" Oct 03 08:55:30 crc kubenswrapper[4899]: I1003 08:55:30.976918 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-mdfwg"] Oct 03 08:55:30 crc kubenswrapper[4899]: I1003 08:55:30.977251 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8554648995-mdfwg" podUID="fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12" containerName="dnsmasq-dns" containerID="cri-o://8ff74515d7f328a8d302764184e3dd7fd83e1a7de713fddb60c74ee39c059b25" gracePeriod=10 Oct 03 08:55:31 crc kubenswrapper[4899]: I1003 08:55:31.336430 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-8554648995-mdfwg" 
podUID="fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.112:5353: connect: connection refused" Oct 03 08:55:31 crc kubenswrapper[4899]: I1003 08:55:31.429595 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-vhnnx" event={"ID":"dcceacb3-5a24-43fc-b90b-036ae2fd715a","Type":"ContainerStarted","Data":"a060b39b0ba8ee23d110bfe8b99d60fbb2b8beb754ebf9e2a56d6d22b341f980"} Oct 03 08:55:31 crc kubenswrapper[4899]: I1003 08:55:31.433376 4899 generic.go:334] "Generic (PLEG): container finished" podID="fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12" containerID="8ff74515d7f328a8d302764184e3dd7fd83e1a7de713fddb60c74ee39c059b25" exitCode=0 Oct 03 08:55:31 crc kubenswrapper[4899]: I1003 08:55:31.433462 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-mdfwg" event={"ID":"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12","Type":"ContainerDied","Data":"8ff74515d7f328a8d302764184e3dd7fd83e1a7de713fddb60c74ee39c059b25"} Oct 03 08:55:31 crc kubenswrapper[4899]: I1003 08:55:31.444474 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-vhnnx" podStartSLOduration=3.444451353 podStartE2EDuration="3.444451353s" podCreationTimestamp="2025-10-03 08:55:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:55:31.442084949 +0000 UTC m=+905.549569902" watchObservedRunningTime="2025-10-03 08:55:31.444451353 +0000 UTC m=+905.551936326" Oct 03 08:55:31 crc kubenswrapper[4899]: I1003 08:55:31.935864 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Oct 03 08:55:32 crc kubenswrapper[4899]: I1003 08:55:32.140979 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-mdfwg" Oct 03 08:55:32 crc kubenswrapper[4899]: I1003 08:55:32.224375 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-config\") pod \"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12\" (UID: \"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12\") " Oct 03 08:55:32 crc kubenswrapper[4899]: I1003 08:55:32.225570 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-ovsdbserver-sb\") pod \"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12\" (UID: \"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12\") " Oct 03 08:55:32 crc kubenswrapper[4899]: I1003 08:55:32.225685 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58j7n\" (UniqueName: \"kubernetes.io/projected/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-kube-api-access-58j7n\") pod \"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12\" (UID: \"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12\") " Oct 03 08:55:32 crc kubenswrapper[4899]: I1003 08:55:32.225791 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-dns-svc\") pod \"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12\" (UID: \"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12\") " Oct 03 08:55:32 crc kubenswrapper[4899]: I1003 08:55:32.225854 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-ovsdbserver-nb\") pod \"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12\" (UID: \"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12\") " Oct 03 08:55:32 crc kubenswrapper[4899]: I1003 08:55:32.245289 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-kube-api-access-58j7n" (OuterVolumeSpecName: "kube-api-access-58j7n") pod "fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12" (UID: "fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12"). InnerVolumeSpecName "kube-api-access-58j7n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:55:32 crc kubenswrapper[4899]: I1003 08:55:32.270075 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12" (UID: "fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:55:32 crc kubenswrapper[4899]: I1003 08:55:32.279142 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12" (UID: "fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:55:32 crc kubenswrapper[4899]: I1003 08:55:32.288635 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-config" (OuterVolumeSpecName: "config") pod "fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12" (UID: "fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:55:32 crc kubenswrapper[4899]: I1003 08:55:32.293226 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12" (UID: "fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:55:32 crc kubenswrapper[4899]: I1003 08:55:32.329330 4899 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:32 crc kubenswrapper[4899]: I1003 08:55:32.329376 4899 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:32 crc kubenswrapper[4899]: I1003 08:55:32.329397 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:32 crc kubenswrapper[4899]: I1003 08:55:32.329409 4899 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:32 crc kubenswrapper[4899]: I1003 08:55:32.329422 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58j7n\" (UniqueName: \"kubernetes.io/projected/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12-kube-api-access-58j7n\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:32 crc kubenswrapper[4899]: I1003 08:55:32.449513 4899 generic.go:334] "Generic (PLEG): container finished" podID="381e208b-07bc-42e2-88b1-99ae1ae4f907" containerID="047b06b08e76bba97016860a234f15127b5c44f6bb356006139af0faf0ff868f" exitCode=0 Oct 03 08:55:32 crc kubenswrapper[4899]: I1003 08:55:32.449596 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-5l7x5" event={"ID":"381e208b-07bc-42e2-88b1-99ae1ae4f907","Type":"ContainerDied","Data":"047b06b08e76bba97016860a234f15127b5c44f6bb356006139af0faf0ff868f"} Oct 03 08:55:32 crc kubenswrapper[4899]: I1003 08:55:32.453164 4899 generic.go:334] "Generic (PLEG): container finished" podID="16d9f858-ae62-4d37-b2de-97fb7d70d6a3" containerID="69e306ba4bb82944f7302882da65f0fb73befd12eba48c6160a6985e1568a072" exitCode=0 Oct 03 08:55:32 crc kubenswrapper[4899]: I1003 08:55:32.453295 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-kjd74" event={"ID":"16d9f858-ae62-4d37-b2de-97fb7d70d6a3","Type":"ContainerDied","Data":"69e306ba4bb82944f7302882da65f0fb73befd12eba48c6160a6985e1568a072"} Oct 03 08:55:32 crc kubenswrapper[4899]: I1003 08:55:32.455980 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-mdfwg" event={"ID":"fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12","Type":"ContainerDied","Data":"f874c456f94d8b6dc6adb173df0388b922927aad780bc7297f689d48f1dcf9e0"} Oct 03 08:55:32 crc kubenswrapper[4899]: I1003 08:55:32.456050 4899 scope.go:117] "RemoveContainer" containerID="8ff74515d7f328a8d302764184e3dd7fd83e1a7de713fddb60c74ee39c059b25" Oct 03 08:55:32 crc kubenswrapper[4899]: I1003 08:55:32.456133 4899 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-mdfwg" Oct 03 08:55:32 crc kubenswrapper[4899]: I1003 08:55:32.460232 4899 generic.go:334] "Generic (PLEG): container finished" podID="dcceacb3-5a24-43fc-b90b-036ae2fd715a" containerID="a060b39b0ba8ee23d110bfe8b99d60fbb2b8beb754ebf9e2a56d6d22b341f980" exitCode=0 Oct 03 08:55:32 crc kubenswrapper[4899]: I1003 08:55:32.460280 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-vhnnx" event={"ID":"dcceacb3-5a24-43fc-b90b-036ae2fd715a","Type":"ContainerDied","Data":"a060b39b0ba8ee23d110bfe8b99d60fbb2b8beb754ebf9e2a56d6d22b341f980"} Oct 03 08:55:32 crc kubenswrapper[4899]: I1003 08:55:32.516865 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-mdfwg"] Oct 03 08:55:32 crc kubenswrapper[4899]: I1003 08:55:32.522694 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8554648995-mdfwg"] Oct 03 08:55:32 crc kubenswrapper[4899]: I1003 08:55:32.538207 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12" path="/var/lib/kubelet/pods/fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12/volumes" Oct 03 08:55:33 crc kubenswrapper[4899]: I1003 08:55:33.863209 4899 scope.go:117] "RemoveContainer" containerID="5add2446bebd235e2fe87e50feb8b19092da2aa78b5af6b2de9ead86af59a860" Oct 03 08:55:34 crc kubenswrapper[4899]: I1003 08:55:34.091434 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-kjd74" Oct 03 08:55:34 crc kubenswrapper[4899]: I1003 08:55:34.112750 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-vhnnx" Oct 03 08:55:34 crc kubenswrapper[4899]: I1003 08:55:34.121859 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-5l7x5" Oct 03 08:55:34 crc kubenswrapper[4899]: I1003 08:55:34.261762 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c6rcc\" (UniqueName: \"kubernetes.io/projected/dcceacb3-5a24-43fc-b90b-036ae2fd715a-kube-api-access-c6rcc\") pod \"dcceacb3-5a24-43fc-b90b-036ae2fd715a\" (UID: \"dcceacb3-5a24-43fc-b90b-036ae2fd715a\") " Oct 03 08:55:34 crc kubenswrapper[4899]: I1003 08:55:34.262430 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwwjx\" (UniqueName: \"kubernetes.io/projected/381e208b-07bc-42e2-88b1-99ae1ae4f907-kube-api-access-xwwjx\") pod \"381e208b-07bc-42e2-88b1-99ae1ae4f907\" (UID: \"381e208b-07bc-42e2-88b1-99ae1ae4f907\") " Oct 03 08:55:34 crc kubenswrapper[4899]: I1003 08:55:34.262583 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mcxm5\" (UniqueName: \"kubernetes.io/projected/16d9f858-ae62-4d37-b2de-97fb7d70d6a3-kube-api-access-mcxm5\") pod \"16d9f858-ae62-4d37-b2de-97fb7d70d6a3\" (UID: \"16d9f858-ae62-4d37-b2de-97fb7d70d6a3\") " Oct 03 08:55:34 crc kubenswrapper[4899]: I1003 08:55:34.270179 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16d9f858-ae62-4d37-b2de-97fb7d70d6a3-kube-api-access-mcxm5" (OuterVolumeSpecName: "kube-api-access-mcxm5") pod "16d9f858-ae62-4d37-b2de-97fb7d70d6a3" (UID: "16d9f858-ae62-4d37-b2de-97fb7d70d6a3"). InnerVolumeSpecName "kube-api-access-mcxm5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:55:34 crc kubenswrapper[4899]: I1003 08:55:34.270551 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dcceacb3-5a24-43fc-b90b-036ae2fd715a-kube-api-access-c6rcc" (OuterVolumeSpecName: "kube-api-access-c6rcc") pod "dcceacb3-5a24-43fc-b90b-036ae2fd715a" (UID: "dcceacb3-5a24-43fc-b90b-036ae2fd715a"). InnerVolumeSpecName "kube-api-access-c6rcc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:55:34 crc kubenswrapper[4899]: I1003 08:55:34.272074 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/381e208b-07bc-42e2-88b1-99ae1ae4f907-kube-api-access-xwwjx" (OuterVolumeSpecName: "kube-api-access-xwwjx") pod "381e208b-07bc-42e2-88b1-99ae1ae4f907" (UID: "381e208b-07bc-42e2-88b1-99ae1ae4f907"). InnerVolumeSpecName "kube-api-access-xwwjx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:55:34 crc kubenswrapper[4899]: I1003 08:55:34.365437 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mcxm5\" (UniqueName: \"kubernetes.io/projected/16d9f858-ae62-4d37-b2de-97fb7d70d6a3-kube-api-access-mcxm5\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:34 crc kubenswrapper[4899]: I1003 08:55:34.365735 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c6rcc\" (UniqueName: \"kubernetes.io/projected/dcceacb3-5a24-43fc-b90b-036ae2fd715a-kube-api-access-c6rcc\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:34 crc kubenswrapper[4899]: I1003 08:55:34.365827 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwwjx\" (UniqueName: \"kubernetes.io/projected/381e208b-07bc-42e2-88b1-99ae1ae4f907-kube-api-access-xwwjx\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:34 crc kubenswrapper[4899]: I1003 08:55:34.477831 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-vhnnx" event={"ID":"dcceacb3-5a24-43fc-b90b-036ae2fd715a","Type":"ContainerDied","Data":"c58ba241418378278a35e89fd908da3c97aa1a5d5ccaec22c8e36cb46a319c32"} Oct 03 08:55:34 crc kubenswrapper[4899]: I1003 08:55:34.477869 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c58ba241418378278a35e89fd908da3c97aa1a5d5ccaec22c8e36cb46a319c32" Oct 03 08:55:34 crc kubenswrapper[4899]: I1003 08:55:34.477941 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-vhnnx" Oct 03 08:55:34 crc kubenswrapper[4899]: I1003 08:55:34.481121 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-s5xff" event={"ID":"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca","Type":"ContainerStarted","Data":"6b1855b0767bba9ccf52b5f88d8df79b76eb6deb00acae2c9f66c8f7917df6ee"} Oct 03 08:55:34 crc kubenswrapper[4899]: I1003 08:55:34.482982 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-5l7x5" Oct 03 08:55:34 crc kubenswrapper[4899]: I1003 08:55:34.483196 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-5l7x5" event={"ID":"381e208b-07bc-42e2-88b1-99ae1ae4f907","Type":"ContainerDied","Data":"a99f0a4bd6a6fa47d1b656aff3ce3e9796ee421430a32590fa7348f98f7e9793"} Oct 03 08:55:34 crc kubenswrapper[4899]: I1003 08:55:34.483223 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a99f0a4bd6a6fa47d1b656aff3ce3e9796ee421430a32590fa7348f98f7e9793" Oct 03 08:55:34 crc kubenswrapper[4899]: I1003 08:55:34.484872 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-kjd74" Oct 03 08:55:34 crc kubenswrapper[4899]: I1003 08:55:34.484869 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-kjd74" event={"ID":"16d9f858-ae62-4d37-b2de-97fb7d70d6a3","Type":"ContainerDied","Data":"b74cbc9cdfbf8a410b2d7586d544c52a0accf4a59fa18f51a0cee595fa9de9f6"} Oct 03 08:55:34 crc kubenswrapper[4899]: I1003 08:55:34.484920 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b74cbc9cdfbf8a410b2d7586d544c52a0accf4a59fa18f51a0cee595fa9de9f6" Oct 03 08:55:34 crc kubenswrapper[4899]: I1003 08:55:34.503755 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-s5xff" podStartSLOduration=1.715006347 podStartE2EDuration="9.503736488s" podCreationTimestamp="2025-10-03 08:55:25 +0000 UTC" firstStartedPulling="2025-10-03 08:55:26.112552501 +0000 UTC m=+900.220037454" lastFinishedPulling="2025-10-03 08:55:33.901282642 +0000 UTC m=+908.008767595" observedRunningTime="2025-10-03 08:55:34.499469272 +0000 UTC m=+908.606954235" watchObservedRunningTime="2025-10-03 08:55:34.503736488 +0000 UTC m=+908.611221441" Oct 03 08:55:36 crc kubenswrapper[4899]: I1003 08:55:36.501623 4899 generic.go:334] "Generic (PLEG): container finished" podID="cff2733b-858c-4578-abcb-a0c503b556d3" containerID="ee34ae64a9b2b48f629d7d07e1e2606394e7e159388889c38dbf46c3b11f31cc" exitCode=0 Oct 03 08:55:36 crc kubenswrapper[4899]: I1003 08:55:36.501704 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cff2733b-858c-4578-abcb-a0c503b556d3","Type":"ContainerDied","Data":"ee34ae64a9b2b48f629d7d07e1e2606394e7e159388889c38dbf46c3b11f31cc"} Oct 03 08:55:36 crc kubenswrapper[4899]: I1003 08:55:36.507785 4899 generic.go:334] "Generic (PLEG): container finished" podID="2d0a71f9-b4af-49f7-b2fe-267a78a4c086" containerID="473248a8c249a4ad67707f5370440c74ec966684cf8db9efdffc15efa23aea2f" exitCode=0 Oct 03 08:55:36 crc kubenswrapper[4899]: I1003 08:55:36.507832 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"2d0a71f9-b4af-49f7-b2fe-267a78a4c086","Type":"ContainerDied","Data":"473248a8c249a4ad67707f5370440c74ec966684cf8db9efdffc15efa23aea2f"} Oct 03 08:55:37 crc kubenswrapper[4899]: I1003 08:55:37.417842 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-etc-swift\") pod \"swift-storage-0\" (UID: \"a9725bfb-2b4a-49d7-b4d8-c2235583f28f\") " pod="openstack/swift-storage-0" Oct 03 08:55:37 crc kubenswrapper[4899]: E1003 08:55:37.418424 4899 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap 
"swift-ring-files" not found Oct 03 08:55:37 crc kubenswrapper[4899]: E1003 08:55:37.418450 4899 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 03 08:55:37 crc kubenswrapper[4899]: E1003 08:55:37.418520 4899 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-etc-swift podName:a9725bfb-2b4a-49d7-b4d8-c2235583f28f nodeName:}" failed. No retries permitted until 2025-10-03 08:55:53.418498779 +0000 UTC m=+927.525983732 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-etc-swift") pod "swift-storage-0" (UID: "a9725bfb-2b4a-49d7-b4d8-c2235583f28f") : configmap "swift-ring-files" not found Oct 03 08:55:37 crc kubenswrapper[4899]: I1003 08:55:37.516360 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cff2733b-858c-4578-abcb-a0c503b556d3","Type":"ContainerStarted","Data":"db44beff9f0057405792026074513811d0b482b82b223a76e2fae8a18ac6215d"} Oct 03 08:55:37 crc kubenswrapper[4899]: I1003 08:55:37.516825 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:55:37 crc kubenswrapper[4899]: I1003 08:55:37.518546 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"2d0a71f9-b4af-49f7-b2fe-267a78a4c086","Type":"ContainerStarted","Data":"938fc7c3cbc765fb2a1b2e95e7e56d472486dba80c36a00504469c810523131e"} Oct 03 08:55:37 crc kubenswrapper[4899]: I1003 08:55:37.518864 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 03 08:55:37 crc kubenswrapper[4899]: I1003 08:55:37.544960 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=43.273370189 podStartE2EDuration="54.544942162s" podCreationTimestamp="2025-10-03 08:54:43 +0000 UTC" firstStartedPulling="2025-10-03 08:54:51.973185161 +0000 UTC m=+866.080670114" lastFinishedPulling="2025-10-03 08:55:03.244757134 +0000 UTC m=+877.352242087" observedRunningTime="2025-10-03 08:55:37.539742348 +0000 UTC m=+911.647227321" watchObservedRunningTime="2025-10-03 08:55:37.544942162 +0000 UTC m=+911.652427115" Oct 03 08:55:37 crc kubenswrapper[4899]: I1003 08:55:37.571538 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=43.279215782 podStartE2EDuration="54.571517509s" podCreationTimestamp="2025-10-03 08:54:43 +0000 UTC" firstStartedPulling="2025-10-03 08:54:51.973188631 +0000 UTC m=+866.080673584" lastFinishedPulling="2025-10-03 08:55:03.265490358 +0000 UTC m=+877.372975311" observedRunningTime="2025-10-03 08:55:37.565738618 +0000 UTC m=+911.673223601" watchObservedRunningTime="2025-10-03 08:55:37.571517509 +0000 UTC m=+911.679002462" Oct 03 08:55:38 crc kubenswrapper[4899]: I1003 08:55:38.705083 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-1db7-account-create-jljht"] Oct 03 08:55:38 crc kubenswrapper[4899]: E1003 08:55:38.705723 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="381e208b-07bc-42e2-88b1-99ae1ae4f907" containerName="mariadb-database-create" Oct 03 08:55:38 crc kubenswrapper[4899]: I1003 08:55:38.705736 4899 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="381e208b-07bc-42e2-88b1-99ae1ae4f907" containerName="mariadb-database-create" Oct 03 08:55:38 crc kubenswrapper[4899]: E1003 08:55:38.705750 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16d9f858-ae62-4d37-b2de-97fb7d70d6a3" containerName="mariadb-database-create" Oct 03 08:55:38 crc kubenswrapper[4899]: I1003 08:55:38.705756 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="16d9f858-ae62-4d37-b2de-97fb7d70d6a3" containerName="mariadb-database-create" Oct 03 08:55:38 crc kubenswrapper[4899]: E1003 08:55:38.705767 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcceacb3-5a24-43fc-b90b-036ae2fd715a" containerName="mariadb-database-create" Oct 03 08:55:38 crc kubenswrapper[4899]: I1003 08:55:38.705775 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcceacb3-5a24-43fc-b90b-036ae2fd715a" containerName="mariadb-database-create" Oct 03 08:55:38 crc kubenswrapper[4899]: E1003 08:55:38.705783 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12" containerName="init" Oct 03 08:55:38 crc kubenswrapper[4899]: I1003 08:55:38.705789 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12" containerName="init" Oct 03 08:55:38 crc kubenswrapper[4899]: E1003 08:55:38.705805 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12" containerName="dnsmasq-dns" Oct 03 08:55:38 crc kubenswrapper[4899]: I1003 08:55:38.705810 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12" containerName="dnsmasq-dns" Oct 03 08:55:38 crc kubenswrapper[4899]: I1003 08:55:38.705975 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc52dbf7-ca90-4c6a-b8c6-2ad5c1e34a12" containerName="dnsmasq-dns" Oct 03 08:55:38 crc kubenswrapper[4899]: I1003 08:55:38.705986 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcceacb3-5a24-43fc-b90b-036ae2fd715a" containerName="mariadb-database-create" Oct 03 08:55:38 crc kubenswrapper[4899]: I1003 08:55:38.706004 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="381e208b-07bc-42e2-88b1-99ae1ae4f907" containerName="mariadb-database-create" Oct 03 08:55:38 crc kubenswrapper[4899]: I1003 08:55:38.706014 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="16d9f858-ae62-4d37-b2de-97fb7d70d6a3" containerName="mariadb-database-create" Oct 03 08:55:38 crc kubenswrapper[4899]: I1003 08:55:38.706529 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-1db7-account-create-jljht" Oct 03 08:55:38 crc kubenswrapper[4899]: I1003 08:55:38.708721 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Oct 03 08:55:38 crc kubenswrapper[4899]: I1003 08:55:38.717474 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-1db7-account-create-jljht"] Oct 03 08:55:38 crc kubenswrapper[4899]: I1003 08:55:38.738289 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxs6l\" (UniqueName: \"kubernetes.io/projected/61fd0b37-2c03-4fde-a030-84387c3a10c0-kube-api-access-lxs6l\") pod \"glance-1db7-account-create-jljht\" (UID: \"61fd0b37-2c03-4fde-a030-84387c3a10c0\") " pod="openstack/glance-1db7-account-create-jljht" Oct 03 08:55:38 crc kubenswrapper[4899]: I1003 08:55:38.839870 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxs6l\" (UniqueName: \"kubernetes.io/projected/61fd0b37-2c03-4fde-a030-84387c3a10c0-kube-api-access-lxs6l\") pod \"glance-1db7-account-create-jljht\" (UID: \"61fd0b37-2c03-4fde-a030-84387c3a10c0\") " pod="openstack/glance-1db7-account-create-jljht" Oct 03 08:55:38 crc kubenswrapper[4899]: I1003 08:55:38.860133 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxs6l\" (UniqueName: \"kubernetes.io/projected/61fd0b37-2c03-4fde-a030-84387c3a10c0-kube-api-access-lxs6l\") pod \"glance-1db7-account-create-jljht\" (UID: \"61fd0b37-2c03-4fde-a030-84387c3a10c0\") " pod="openstack/glance-1db7-account-create-jljht" Oct 03 08:55:39 crc kubenswrapper[4899]: I1003 08:55:39.028966 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-1db7-account-create-jljht" Oct 03 08:55:39 crc kubenswrapper[4899]: I1003 08:55:39.557165 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-1db7-account-create-jljht"] Oct 03 08:55:40 crc kubenswrapper[4899]: I1003 08:55:40.562789 4899 generic.go:334] "Generic (PLEG): container finished" podID="61fd0b37-2c03-4fde-a030-84387c3a10c0" containerID="150fc358dde7215bc721566308068d85fc9d654342f7f6f39fdea20229ed9fd3" exitCode=0 Oct 03 08:55:40 crc kubenswrapper[4899]: I1003 08:55:40.562876 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-1db7-account-create-jljht" event={"ID":"61fd0b37-2c03-4fde-a030-84387c3a10c0","Type":"ContainerDied","Data":"150fc358dde7215bc721566308068d85fc9d654342f7f6f39fdea20229ed9fd3"} Oct 03 08:55:40 crc kubenswrapper[4899]: I1003 08:55:40.563096 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-1db7-account-create-jljht" event={"ID":"61fd0b37-2c03-4fde-a030-84387c3a10c0","Type":"ContainerStarted","Data":"4ef89c1faa0c82285c0d25b19107be2616e8c5ac12daaff72db878036d6813be"} Oct 03 08:55:41 crc kubenswrapper[4899]: I1003 08:55:41.573660 4899 generic.go:334] "Generic (PLEG): container finished" podID="79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca" containerID="6b1855b0767bba9ccf52b5f88d8df79b76eb6deb00acae2c9f66c8f7917df6ee" exitCode=0 Oct 03 08:55:41 crc kubenswrapper[4899]: I1003 08:55:41.573756 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-s5xff" event={"ID":"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca","Type":"ContainerDied","Data":"6b1855b0767bba9ccf52b5f88d8df79b76eb6deb00acae2c9f66c8f7917df6ee"} Oct 03 08:55:41 crc kubenswrapper[4899]: I1003 08:55:41.875828 4899 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/glance-1db7-account-create-jljht" Oct 03 08:55:41 crc kubenswrapper[4899]: I1003 08:55:41.897840 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lxs6l\" (UniqueName: \"kubernetes.io/projected/61fd0b37-2c03-4fde-a030-84387c3a10c0-kube-api-access-lxs6l\") pod \"61fd0b37-2c03-4fde-a030-84387c3a10c0\" (UID: \"61fd0b37-2c03-4fde-a030-84387c3a10c0\") " Oct 03 08:55:41 crc kubenswrapper[4899]: I1003 08:55:41.903608 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61fd0b37-2c03-4fde-a030-84387c3a10c0-kube-api-access-lxs6l" (OuterVolumeSpecName: "kube-api-access-lxs6l") pod "61fd0b37-2c03-4fde-a030-84387c3a10c0" (UID: "61fd0b37-2c03-4fde-a030-84387c3a10c0"). InnerVolumeSpecName "kube-api-access-lxs6l". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:55:42 crc kubenswrapper[4899]: I1003 08:55:42.000455 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lxs6l\" (UniqueName: \"kubernetes.io/projected/61fd0b37-2c03-4fde-a030-84387c3a10c0-kube-api-access-lxs6l\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:42 crc kubenswrapper[4899]: I1003 08:55:42.198691 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 08:55:42 crc kubenswrapper[4899]: I1003 08:55:42.198759 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 08:55:42 crc kubenswrapper[4899]: I1003 08:55:42.583088 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-1db7-account-create-jljht" event={"ID":"61fd0b37-2c03-4fde-a030-84387c3a10c0","Type":"ContainerDied","Data":"4ef89c1faa0c82285c0d25b19107be2616e8c5ac12daaff72db878036d6813be"} Oct 03 08:55:42 crc kubenswrapper[4899]: I1003 08:55:42.583145 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4ef89c1faa0c82285c0d25b19107be2616e8c5ac12daaff72db878036d6813be" Oct 03 08:55:42 crc kubenswrapper[4899]: I1003 08:55:42.583108 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-1db7-account-create-jljht" Oct 03 08:55:42 crc kubenswrapper[4899]: I1003 08:55:42.878746 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-s5xff" Oct 03 08:55:42 crc kubenswrapper[4899]: I1003 08:55:42.913545 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-ring-data-devices\") pod \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\" (UID: \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\") " Oct 03 08:55:42 crc kubenswrapper[4899]: I1003 08:55:42.913645 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-swiftconf\") pod \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\" (UID: \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\") " Oct 03 08:55:42 crc kubenswrapper[4899]: I1003 08:55:42.913744 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mbmvc\" (UniqueName: \"kubernetes.io/projected/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-kube-api-access-mbmvc\") pod \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\" (UID: \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\") " Oct 03 08:55:42 crc kubenswrapper[4899]: I1003 08:55:42.913782 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-combined-ca-bundle\") pod \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\" (UID: \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\") " Oct 03 08:55:42 crc kubenswrapper[4899]: I1003 08:55:42.913836 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-etc-swift\") pod \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\" (UID: \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\") " Oct 03 08:55:42 crc kubenswrapper[4899]: I1003 08:55:42.913873 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-dispersionconf\") pod \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\" (UID: \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\") " Oct 03 08:55:42 crc kubenswrapper[4899]: I1003 08:55:42.913907 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-scripts\") pod \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\" (UID: \"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca\") " Oct 03 08:55:42 crc kubenswrapper[4899]: I1003 08:55:42.914596 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca" (UID: "79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:55:42 crc kubenswrapper[4899]: I1003 08:55:42.914917 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca" (UID: "79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:55:42 crc kubenswrapper[4899]: I1003 08:55:42.923985 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-kube-api-access-mbmvc" (OuterVolumeSpecName: "kube-api-access-mbmvc") pod "79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca" (UID: "79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca"). InnerVolumeSpecName "kube-api-access-mbmvc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:55:42 crc kubenswrapper[4899]: I1003 08:55:42.925273 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca" (UID: "79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:55:42 crc kubenswrapper[4899]: I1003 08:55:42.940176 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca" (UID: "79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:55:42 crc kubenswrapper[4899]: I1003 08:55:42.941305 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca" (UID: "79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:55:42 crc kubenswrapper[4899]: I1003 08:55:42.958739 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-scripts" (OuterVolumeSpecName: "scripts") pod "79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca" (UID: "79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:55:43 crc kubenswrapper[4899]: I1003 08:55:43.016336 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mbmvc\" (UniqueName: \"kubernetes.io/projected/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-kube-api-access-mbmvc\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:43 crc kubenswrapper[4899]: I1003 08:55:43.016378 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:43 crc kubenswrapper[4899]: I1003 08:55:43.016390 4899 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-etc-swift\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:43 crc kubenswrapper[4899]: I1003 08:55:43.016402 4899 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-dispersionconf\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:43 crc kubenswrapper[4899]: I1003 08:55:43.016414 4899 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:43 crc kubenswrapper[4899]: I1003 08:55:43.016424 4899 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-ring-data-devices\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:43 crc kubenswrapper[4899]: I1003 08:55:43.016434 4899 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca-swiftconf\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:43 crc kubenswrapper[4899]: I1003 08:55:43.601433 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-s5xff" event={"ID":"79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca","Type":"ContainerDied","Data":"7bc857ce8487d879bc42f7d7bdc058b0e3a8e3418bf3d68d16a19e3a3c540082"} Oct 03 08:55:43 crc kubenswrapper[4899]: I1003 08:55:43.601483 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7bc857ce8487d879bc42f7d7bdc058b0e3a8e3418bf3d68d16a19e3a3c540082" Oct 03 08:55:43 crc kubenswrapper[4899]: I1003 08:55:43.601538 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-s5xff" Oct 03 08:55:43 crc kubenswrapper[4899]: I1003 08:55:43.840698 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-r7dz5"] Oct 03 08:55:43 crc kubenswrapper[4899]: E1003 08:55:43.841376 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca" containerName="swift-ring-rebalance" Oct 03 08:55:43 crc kubenswrapper[4899]: I1003 08:55:43.841483 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca" containerName="swift-ring-rebalance" Oct 03 08:55:43 crc kubenswrapper[4899]: E1003 08:55:43.841592 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61fd0b37-2c03-4fde-a030-84387c3a10c0" containerName="mariadb-account-create" Oct 03 08:55:43 crc kubenswrapper[4899]: I1003 08:55:43.841676 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="61fd0b37-2c03-4fde-a030-84387c3a10c0" containerName="mariadb-account-create" Oct 03 08:55:43 crc kubenswrapper[4899]: I1003 08:55:43.841958 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca" containerName="swift-ring-rebalance" Oct 03 08:55:43 crc kubenswrapper[4899]: I1003 08:55:43.842059 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="61fd0b37-2c03-4fde-a030-84387c3a10c0" containerName="mariadb-account-create" Oct 03 08:55:43 crc kubenswrapper[4899]: I1003 08:55:43.842816 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-r7dz5" Oct 03 08:55:43 crc kubenswrapper[4899]: I1003 08:55:43.845399 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Oct 03 08:55:43 crc kubenswrapper[4899]: I1003 08:55:43.846619 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-9s7jc" Oct 03 08:55:43 crc kubenswrapper[4899]: I1003 08:55:43.852383 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-r7dz5"] Oct 03 08:55:43 crc kubenswrapper[4899]: I1003 08:55:43.932672 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwmd9\" (UniqueName: \"kubernetes.io/projected/e95726c5-31b4-47eb-9e32-eec52266c460-kube-api-access-hwmd9\") pod \"glance-db-sync-r7dz5\" (UID: \"e95726c5-31b4-47eb-9e32-eec52266c460\") " pod="openstack/glance-db-sync-r7dz5" Oct 03 08:55:43 crc kubenswrapper[4899]: I1003 08:55:43.933155 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e95726c5-31b4-47eb-9e32-eec52266c460-combined-ca-bundle\") pod \"glance-db-sync-r7dz5\" (UID: \"e95726c5-31b4-47eb-9e32-eec52266c460\") " pod="openstack/glance-db-sync-r7dz5" Oct 03 08:55:43 crc kubenswrapper[4899]: I1003 08:55:43.933223 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e95726c5-31b4-47eb-9e32-eec52266c460-config-data\") pod \"glance-db-sync-r7dz5\" (UID: \"e95726c5-31b4-47eb-9e32-eec52266c460\") " pod="openstack/glance-db-sync-r7dz5" Oct 03 08:55:43 crc kubenswrapper[4899]: I1003 08:55:43.933282 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: 
\"kubernetes.io/secret/e95726c5-31b4-47eb-9e32-eec52266c460-db-sync-config-data\") pod \"glance-db-sync-r7dz5\" (UID: \"e95726c5-31b4-47eb-9e32-eec52266c460\") " pod="openstack/glance-db-sync-r7dz5" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.022766 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-wfz45" podUID="89364578-24ad-4c19-8e0b-ba123f58f4eb" containerName="ovn-controller" probeResult="failure" output=< Oct 03 08:55:44 crc kubenswrapper[4899]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Oct 03 08:55:44 crc kubenswrapper[4899]: > Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.034245 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e95726c5-31b4-47eb-9e32-eec52266c460-config-data\") pod \"glance-db-sync-r7dz5\" (UID: \"e95726c5-31b4-47eb-9e32-eec52266c460\") " pod="openstack/glance-db-sync-r7dz5" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.034328 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e95726c5-31b4-47eb-9e32-eec52266c460-db-sync-config-data\") pod \"glance-db-sync-r7dz5\" (UID: \"e95726c5-31b4-47eb-9e32-eec52266c460\") " pod="openstack/glance-db-sync-r7dz5" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.034379 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwmd9\" (UniqueName: \"kubernetes.io/projected/e95726c5-31b4-47eb-9e32-eec52266c460-kube-api-access-hwmd9\") pod \"glance-db-sync-r7dz5\" (UID: \"e95726c5-31b4-47eb-9e32-eec52266c460\") " pod="openstack/glance-db-sync-r7dz5" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.034429 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e95726c5-31b4-47eb-9e32-eec52266c460-combined-ca-bundle\") pod \"glance-db-sync-r7dz5\" (UID: \"e95726c5-31b4-47eb-9e32-eec52266c460\") " pod="openstack/glance-db-sync-r7dz5" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.038759 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e95726c5-31b4-47eb-9e32-eec52266c460-db-sync-config-data\") pod \"glance-db-sync-r7dz5\" (UID: \"e95726c5-31b4-47eb-9e32-eec52266c460\") " pod="openstack/glance-db-sync-r7dz5" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.039215 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e95726c5-31b4-47eb-9e32-eec52266c460-combined-ca-bundle\") pod \"glance-db-sync-r7dz5\" (UID: \"e95726c5-31b4-47eb-9e32-eec52266c460\") " pod="openstack/glance-db-sync-r7dz5" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.044629 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e95726c5-31b4-47eb-9e32-eec52266c460-config-data\") pod \"glance-db-sync-r7dz5\" (UID: \"e95726c5-31b4-47eb-9e32-eec52266c460\") " pod="openstack/glance-db-sync-r7dz5" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.045552 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-c7dff" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.049178 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack/ovn-controller-ovs-c7dff" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.056009 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwmd9\" (UniqueName: \"kubernetes.io/projected/e95726c5-31b4-47eb-9e32-eec52266c460-kube-api-access-hwmd9\") pod \"glance-db-sync-r7dz5\" (UID: \"e95726c5-31b4-47eb-9e32-eec52266c460\") " pod="openstack/glance-db-sync-r7dz5" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.161341 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-r7dz5" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.287908 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-wfz45-config-rvt75"] Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.289506 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-wfz45-config-rvt75" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.294801 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.299152 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-wfz45-config-rvt75"] Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.340814 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fe83f382-2ea0-46bb-9179-9114add9f97e-var-run-ovn\") pod \"ovn-controller-wfz45-config-rvt75\" (UID: \"fe83f382-2ea0-46bb-9179-9114add9f97e\") " pod="openstack/ovn-controller-wfz45-config-rvt75" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.340922 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fe83f382-2ea0-46bb-9179-9114add9f97e-additional-scripts\") pod \"ovn-controller-wfz45-config-rvt75\" (UID: \"fe83f382-2ea0-46bb-9179-9114add9f97e\") " pod="openstack/ovn-controller-wfz45-config-rvt75" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.340941 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fe83f382-2ea0-46bb-9179-9114add9f97e-scripts\") pod \"ovn-controller-wfz45-config-rvt75\" (UID: \"fe83f382-2ea0-46bb-9179-9114add9f97e\") " pod="openstack/ovn-controller-wfz45-config-rvt75" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.340976 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fe83f382-2ea0-46bb-9179-9114add9f97e-var-log-ovn\") pod \"ovn-controller-wfz45-config-rvt75\" (UID: \"fe83f382-2ea0-46bb-9179-9114add9f97e\") " pod="openstack/ovn-controller-wfz45-config-rvt75" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.341054 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fe83f382-2ea0-46bb-9179-9114add9f97e-var-run\") pod \"ovn-controller-wfz45-config-rvt75\" (UID: \"fe83f382-2ea0-46bb-9179-9114add9f97e\") " pod="openstack/ovn-controller-wfz45-config-rvt75" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.341073 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-4k65z\" (UniqueName: \"kubernetes.io/projected/fe83f382-2ea0-46bb-9179-9114add9f97e-kube-api-access-4k65z\") pod \"ovn-controller-wfz45-config-rvt75\" (UID: \"fe83f382-2ea0-46bb-9179-9114add9f97e\") " pod="openstack/ovn-controller-wfz45-config-rvt75" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.442699 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fe83f382-2ea0-46bb-9179-9114add9f97e-var-run\") pod \"ovn-controller-wfz45-config-rvt75\" (UID: \"fe83f382-2ea0-46bb-9179-9114add9f97e\") " pod="openstack/ovn-controller-wfz45-config-rvt75" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.442749 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4k65z\" (UniqueName: \"kubernetes.io/projected/fe83f382-2ea0-46bb-9179-9114add9f97e-kube-api-access-4k65z\") pod \"ovn-controller-wfz45-config-rvt75\" (UID: \"fe83f382-2ea0-46bb-9179-9114add9f97e\") " pod="openstack/ovn-controller-wfz45-config-rvt75" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.442805 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fe83f382-2ea0-46bb-9179-9114add9f97e-var-run-ovn\") pod \"ovn-controller-wfz45-config-rvt75\" (UID: \"fe83f382-2ea0-46bb-9179-9114add9f97e\") " pod="openstack/ovn-controller-wfz45-config-rvt75" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.442851 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fe83f382-2ea0-46bb-9179-9114add9f97e-additional-scripts\") pod \"ovn-controller-wfz45-config-rvt75\" (UID: \"fe83f382-2ea0-46bb-9179-9114add9f97e\") " pod="openstack/ovn-controller-wfz45-config-rvt75" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.442872 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fe83f382-2ea0-46bb-9179-9114add9f97e-scripts\") pod \"ovn-controller-wfz45-config-rvt75\" (UID: \"fe83f382-2ea0-46bb-9179-9114add9f97e\") " pod="openstack/ovn-controller-wfz45-config-rvt75" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.442934 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fe83f382-2ea0-46bb-9179-9114add9f97e-var-log-ovn\") pod \"ovn-controller-wfz45-config-rvt75\" (UID: \"fe83f382-2ea0-46bb-9179-9114add9f97e\") " pod="openstack/ovn-controller-wfz45-config-rvt75" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.443063 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fe83f382-2ea0-46bb-9179-9114add9f97e-var-run\") pod \"ovn-controller-wfz45-config-rvt75\" (UID: \"fe83f382-2ea0-46bb-9179-9114add9f97e\") " pod="openstack/ovn-controller-wfz45-config-rvt75" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.443131 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fe83f382-2ea0-46bb-9179-9114add9f97e-var-log-ovn\") pod \"ovn-controller-wfz45-config-rvt75\" (UID: \"fe83f382-2ea0-46bb-9179-9114add9f97e\") " pod="openstack/ovn-controller-wfz45-config-rvt75" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.443193 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fe83f382-2ea0-46bb-9179-9114add9f97e-var-run-ovn\") pod \"ovn-controller-wfz45-config-rvt75\" (UID: \"fe83f382-2ea0-46bb-9179-9114add9f97e\") " pod="openstack/ovn-controller-wfz45-config-rvt75" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.443592 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fe83f382-2ea0-46bb-9179-9114add9f97e-additional-scripts\") pod \"ovn-controller-wfz45-config-rvt75\" (UID: \"fe83f382-2ea0-46bb-9179-9114add9f97e\") " pod="openstack/ovn-controller-wfz45-config-rvt75" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.444996 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fe83f382-2ea0-46bb-9179-9114add9f97e-scripts\") pod \"ovn-controller-wfz45-config-rvt75\" (UID: \"fe83f382-2ea0-46bb-9179-9114add9f97e\") " pod="openstack/ovn-controller-wfz45-config-rvt75" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.462662 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4k65z\" (UniqueName: \"kubernetes.io/projected/fe83f382-2ea0-46bb-9179-9114add9f97e-kube-api-access-4k65z\") pod \"ovn-controller-wfz45-config-rvt75\" (UID: \"fe83f382-2ea0-46bb-9179-9114add9f97e\") " pod="openstack/ovn-controller-wfz45-config-rvt75" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.616804 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-wfz45-config-rvt75" Oct 03 08:55:44 crc kubenswrapper[4899]: I1003 08:55:44.741371 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-r7dz5"] Oct 03 08:55:44 crc kubenswrapper[4899]: W1003 08:55:44.750159 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode95726c5_31b4_47eb_9e32_eec52266c460.slice/crio-d14a983f481fac5026a25f2d14d022843c652f8683d4aed7dc370d8eefd424b3 WatchSource:0}: Error finding container d14a983f481fac5026a25f2d14d022843c652f8683d4aed7dc370d8eefd424b3: Status 404 returned error can't find the container with id d14a983f481fac5026a25f2d14d022843c652f8683d4aed7dc370d8eefd424b3 Oct 03 08:55:45 crc kubenswrapper[4899]: I1003 08:55:45.043236 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-wfz45-config-rvt75"] Oct 03 08:55:45 crc kubenswrapper[4899]: I1003 08:55:45.616980 4899 generic.go:334] "Generic (PLEG): container finished" podID="fe83f382-2ea0-46bb-9179-9114add9f97e" containerID="938b832fd628929ec85ba42f8de6d491b92c6d3c49108bc2195100c9bc0ddd91" exitCode=0 Oct 03 08:55:45 crc kubenswrapper[4899]: I1003 08:55:45.617272 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-wfz45-config-rvt75" event={"ID":"fe83f382-2ea0-46bb-9179-9114add9f97e","Type":"ContainerDied","Data":"938b832fd628929ec85ba42f8de6d491b92c6d3c49108bc2195100c9bc0ddd91"} Oct 03 08:55:45 crc kubenswrapper[4899]: I1003 08:55:45.617300 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-wfz45-config-rvt75" event={"ID":"fe83f382-2ea0-46bb-9179-9114add9f97e","Type":"ContainerStarted","Data":"c159eeecbf8b7bfa17150d23e554ddbf3e25e1cbeba71878eccefb533f848523"} Oct 03 08:55:45 crc kubenswrapper[4899]: I1003 08:55:45.618988 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-r7dz5" 
event={"ID":"e95726c5-31b4-47eb-9e32-eec52266c460","Type":"ContainerStarted","Data":"d14a983f481fac5026a25f2d14d022843c652f8683d4aed7dc370d8eefd424b3"} Oct 03 08:55:46 crc kubenswrapper[4899]: I1003 08:55:46.917464 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-wfz45-config-rvt75" Oct 03 08:55:47 crc kubenswrapper[4899]: I1003 08:55:47.008397 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4k65z\" (UniqueName: \"kubernetes.io/projected/fe83f382-2ea0-46bb-9179-9114add9f97e-kube-api-access-4k65z\") pod \"fe83f382-2ea0-46bb-9179-9114add9f97e\" (UID: \"fe83f382-2ea0-46bb-9179-9114add9f97e\") " Oct 03 08:55:47 crc kubenswrapper[4899]: I1003 08:55:47.008451 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fe83f382-2ea0-46bb-9179-9114add9f97e-var-log-ovn\") pod \"fe83f382-2ea0-46bb-9179-9114add9f97e\" (UID: \"fe83f382-2ea0-46bb-9179-9114add9f97e\") " Oct 03 08:55:47 crc kubenswrapper[4899]: I1003 08:55:47.008510 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fe83f382-2ea0-46bb-9179-9114add9f97e-var-run-ovn\") pod \"fe83f382-2ea0-46bb-9179-9114add9f97e\" (UID: \"fe83f382-2ea0-46bb-9179-9114add9f97e\") " Oct 03 08:55:47 crc kubenswrapper[4899]: I1003 08:55:47.008538 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fe83f382-2ea0-46bb-9179-9114add9f97e-scripts\") pod \"fe83f382-2ea0-46bb-9179-9114add9f97e\" (UID: \"fe83f382-2ea0-46bb-9179-9114add9f97e\") " Oct 03 08:55:47 crc kubenswrapper[4899]: I1003 08:55:47.008559 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fe83f382-2ea0-46bb-9179-9114add9f97e-var-run\") pod \"fe83f382-2ea0-46bb-9179-9114add9f97e\" (UID: \"fe83f382-2ea0-46bb-9179-9114add9f97e\") " Oct 03 08:55:47 crc kubenswrapper[4899]: I1003 08:55:47.008820 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fe83f382-2ea0-46bb-9179-9114add9f97e-additional-scripts\") pod \"fe83f382-2ea0-46bb-9179-9114add9f97e\" (UID: \"fe83f382-2ea0-46bb-9179-9114add9f97e\") " Oct 03 08:55:47 crc kubenswrapper[4899]: I1003 08:55:47.009000 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fe83f382-2ea0-46bb-9179-9114add9f97e-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "fe83f382-2ea0-46bb-9179-9114add9f97e" (UID: "fe83f382-2ea0-46bb-9179-9114add9f97e"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 08:55:47 crc kubenswrapper[4899]: I1003 08:55:47.009043 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fe83f382-2ea0-46bb-9179-9114add9f97e-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "fe83f382-2ea0-46bb-9179-9114add9f97e" (UID: "fe83f382-2ea0-46bb-9179-9114add9f97e"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 08:55:47 crc kubenswrapper[4899]: I1003 08:55:47.009346 4899 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fe83f382-2ea0-46bb-9179-9114add9f97e-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:47 crc kubenswrapper[4899]: I1003 08:55:47.009366 4899 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fe83f382-2ea0-46bb-9179-9114add9f97e-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:47 crc kubenswrapper[4899]: I1003 08:55:47.009987 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe83f382-2ea0-46bb-9179-9114add9f97e-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "fe83f382-2ea0-46bb-9179-9114add9f97e" (UID: "fe83f382-2ea0-46bb-9179-9114add9f97e"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:55:47 crc kubenswrapper[4899]: I1003 08:55:47.009086 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fe83f382-2ea0-46bb-9179-9114add9f97e-var-run" (OuterVolumeSpecName: "var-run") pod "fe83f382-2ea0-46bb-9179-9114add9f97e" (UID: "fe83f382-2ea0-46bb-9179-9114add9f97e"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 08:55:47 crc kubenswrapper[4899]: I1003 08:55:47.010207 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe83f382-2ea0-46bb-9179-9114add9f97e-scripts" (OuterVolumeSpecName: "scripts") pod "fe83f382-2ea0-46bb-9179-9114add9f97e" (UID: "fe83f382-2ea0-46bb-9179-9114add9f97e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:55:47 crc kubenswrapper[4899]: I1003 08:55:47.027199 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe83f382-2ea0-46bb-9179-9114add9f97e-kube-api-access-4k65z" (OuterVolumeSpecName: "kube-api-access-4k65z") pod "fe83f382-2ea0-46bb-9179-9114add9f97e" (UID: "fe83f382-2ea0-46bb-9179-9114add9f97e"). InnerVolumeSpecName "kube-api-access-4k65z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:55:47 crc kubenswrapper[4899]: I1003 08:55:47.110855 4899 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fe83f382-2ea0-46bb-9179-9114add9f97e-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:47 crc kubenswrapper[4899]: I1003 08:55:47.110912 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4k65z\" (UniqueName: \"kubernetes.io/projected/fe83f382-2ea0-46bb-9179-9114add9f97e-kube-api-access-4k65z\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:47 crc kubenswrapper[4899]: I1003 08:55:47.110927 4899 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fe83f382-2ea0-46bb-9179-9114add9f97e-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:47 crc kubenswrapper[4899]: I1003 08:55:47.110939 4899 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fe83f382-2ea0-46bb-9179-9114add9f97e-var-run\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:47 crc kubenswrapper[4899]: I1003 08:55:47.636457 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-wfz45-config-rvt75" event={"ID":"fe83f382-2ea0-46bb-9179-9114add9f97e","Type":"ContainerDied","Data":"c159eeecbf8b7bfa17150d23e554ddbf3e25e1cbeba71878eccefb533f848523"} Oct 03 08:55:47 crc kubenswrapper[4899]: I1003 08:55:47.636759 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c159eeecbf8b7bfa17150d23e554ddbf3e25e1cbeba71878eccefb533f848523" Oct 03 08:55:47 crc kubenswrapper[4899]: I1003 08:55:47.636512 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-wfz45-config-rvt75" Oct 03 08:55:48 crc kubenswrapper[4899]: I1003 08:55:48.026650 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-wfz45-config-rvt75"] Oct 03 08:55:48 crc kubenswrapper[4899]: I1003 08:55:48.037109 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-wfz45-config-rvt75"] Oct 03 08:55:48 crc kubenswrapper[4899]: I1003 08:55:48.109810 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-c734-account-create-f7xq6"] Oct 03 08:55:48 crc kubenswrapper[4899]: E1003 08:55:48.110249 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe83f382-2ea0-46bb-9179-9114add9f97e" containerName="ovn-config" Oct 03 08:55:48 crc kubenswrapper[4899]: I1003 08:55:48.110267 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe83f382-2ea0-46bb-9179-9114add9f97e" containerName="ovn-config" Oct 03 08:55:48 crc kubenswrapper[4899]: I1003 08:55:48.110514 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe83f382-2ea0-46bb-9179-9114add9f97e" containerName="ovn-config" Oct 03 08:55:48 crc kubenswrapper[4899]: I1003 08:55:48.111233 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-c734-account-create-f7xq6" Oct 03 08:55:48 crc kubenswrapper[4899]: I1003 08:55:48.115560 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Oct 03 08:55:48 crc kubenswrapper[4899]: I1003 08:55:48.118346 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-c734-account-create-f7xq6"] Oct 03 08:55:48 crc kubenswrapper[4899]: I1003 08:55:48.235333 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xk2l2\" (UniqueName: \"kubernetes.io/projected/2fd15087-4144-4272-9136-1537ce09cde1-kube-api-access-xk2l2\") pod \"keystone-c734-account-create-f7xq6\" (UID: \"2fd15087-4144-4272-9136-1537ce09cde1\") " pod="openstack/keystone-c734-account-create-f7xq6" Oct 03 08:55:48 crc kubenswrapper[4899]: I1003 08:55:48.336784 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xk2l2\" (UniqueName: \"kubernetes.io/projected/2fd15087-4144-4272-9136-1537ce09cde1-kube-api-access-xk2l2\") pod \"keystone-c734-account-create-f7xq6\" (UID: \"2fd15087-4144-4272-9136-1537ce09cde1\") " pod="openstack/keystone-c734-account-create-f7xq6" Oct 03 08:55:48 crc kubenswrapper[4899]: I1003 08:55:48.361179 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xk2l2\" (UniqueName: \"kubernetes.io/projected/2fd15087-4144-4272-9136-1537ce09cde1-kube-api-access-xk2l2\") pod \"keystone-c734-account-create-f7xq6\" (UID: \"2fd15087-4144-4272-9136-1537ce09cde1\") " pod="openstack/keystone-c734-account-create-f7xq6" Oct 03 08:55:48 crc kubenswrapper[4899]: I1003 08:55:48.410174 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-d5d9-account-create-xkb58"] Oct 03 08:55:48 crc kubenswrapper[4899]: I1003 08:55:48.412333 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-d5d9-account-create-xkb58" Oct 03 08:55:48 crc kubenswrapper[4899]: I1003 08:55:48.414442 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Oct 03 08:55:48 crc kubenswrapper[4899]: I1003 08:55:48.422168 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-d5d9-account-create-xkb58"] Oct 03 08:55:48 crc kubenswrapper[4899]: I1003 08:55:48.438341 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-c734-account-create-f7xq6" Oct 03 08:55:48 crc kubenswrapper[4899]: I1003 08:55:48.539336 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhw58\" (UniqueName: \"kubernetes.io/projected/b9d2a024-e6cb-459b-baaa-98e13e6c46f2-kube-api-access-vhw58\") pod \"placement-d5d9-account-create-xkb58\" (UID: \"b9d2a024-e6cb-459b-baaa-98e13e6c46f2\") " pod="openstack/placement-d5d9-account-create-xkb58" Oct 03 08:55:48 crc kubenswrapper[4899]: I1003 08:55:48.544812 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe83f382-2ea0-46bb-9179-9114add9f97e" path="/var/lib/kubelet/pods/fe83f382-2ea0-46bb-9179-9114add9f97e/volumes" Oct 03 08:55:48 crc kubenswrapper[4899]: I1003 08:55:48.640564 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhw58\" (UniqueName: \"kubernetes.io/projected/b9d2a024-e6cb-459b-baaa-98e13e6c46f2-kube-api-access-vhw58\") pod \"placement-d5d9-account-create-xkb58\" (UID: \"b9d2a024-e6cb-459b-baaa-98e13e6c46f2\") " pod="openstack/placement-d5d9-account-create-xkb58" Oct 03 08:55:48 crc kubenswrapper[4899]: I1003 08:55:48.659309 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhw58\" (UniqueName: \"kubernetes.io/projected/b9d2a024-e6cb-459b-baaa-98e13e6c46f2-kube-api-access-vhw58\") pod \"placement-d5d9-account-create-xkb58\" (UID: \"b9d2a024-e6cb-459b-baaa-98e13e6c46f2\") " pod="openstack/placement-d5d9-account-create-xkb58" Oct 03 08:55:48 crc kubenswrapper[4899]: I1003 08:55:48.733495 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-d5d9-account-create-xkb58" Oct 03 08:55:48 crc kubenswrapper[4899]: I1003 08:55:48.866073 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-c734-account-create-f7xq6"] Oct 03 08:55:48 crc kubenswrapper[4899]: W1003 08:55:48.897797 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2fd15087_4144_4272_9136_1537ce09cde1.slice/crio-f2388014e0df4991bb7d09ff069db4cbcff62f19b8f2937c7e4284dced15f8d7 WatchSource:0}: Error finding container f2388014e0df4991bb7d09ff069db4cbcff62f19b8f2937c7e4284dced15f8d7: Status 404 returned error can't find the container with id f2388014e0df4991bb7d09ff069db4cbcff62f19b8f2937c7e4284dced15f8d7 Oct 03 08:55:49 crc kubenswrapper[4899]: I1003 08:55:49.032116 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-wfz45" Oct 03 08:55:49 crc kubenswrapper[4899]: I1003 08:55:49.156187 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-d5d9-account-create-xkb58"] Oct 03 08:55:49 crc kubenswrapper[4899]: W1003 08:55:49.165593 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9d2a024_e6cb_459b_baaa_98e13e6c46f2.slice/crio-82fd8c58886285b983f853253733ff5319681ab4f8af2801438d3480332e8614 WatchSource:0}: Error finding container 82fd8c58886285b983f853253733ff5319681ab4f8af2801438d3480332e8614: Status 404 returned error can't find the container with id 82fd8c58886285b983f853253733ff5319681ab4f8af2801438d3480332e8614 Oct 03 08:55:49 crc kubenswrapper[4899]: I1003 08:55:49.655116 4899 generic.go:334] "Generic (PLEG): container finished" podID="b9d2a024-e6cb-459b-baaa-98e13e6c46f2" 
containerID="379fe7de065d4009245ffe65b55403c1c91dc0d22eabde5774e6bc860a1e47a3" exitCode=0 Oct 03 08:55:49 crc kubenswrapper[4899]: I1003 08:55:49.655816 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-d5d9-account-create-xkb58" event={"ID":"b9d2a024-e6cb-459b-baaa-98e13e6c46f2","Type":"ContainerDied","Data":"379fe7de065d4009245ffe65b55403c1c91dc0d22eabde5774e6bc860a1e47a3"} Oct 03 08:55:49 crc kubenswrapper[4899]: I1003 08:55:49.655849 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-d5d9-account-create-xkb58" event={"ID":"b9d2a024-e6cb-459b-baaa-98e13e6c46f2","Type":"ContainerStarted","Data":"82fd8c58886285b983f853253733ff5319681ab4f8af2801438d3480332e8614"} Oct 03 08:55:49 crc kubenswrapper[4899]: I1003 08:55:49.657373 4899 generic.go:334] "Generic (PLEG): container finished" podID="2fd15087-4144-4272-9136-1537ce09cde1" containerID="cf3f56777ba87224ce7d6f600a81bda96f390259baa2944388ade315a271a660" exitCode=0 Oct 03 08:55:49 crc kubenswrapper[4899]: I1003 08:55:49.657415 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-c734-account-create-f7xq6" event={"ID":"2fd15087-4144-4272-9136-1537ce09cde1","Type":"ContainerDied","Data":"cf3f56777ba87224ce7d6f600a81bda96f390259baa2944388ade315a271a660"} Oct 03 08:55:49 crc kubenswrapper[4899]: I1003 08:55:49.657438 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-c734-account-create-f7xq6" event={"ID":"2fd15087-4144-4272-9136-1537ce09cde1","Type":"ContainerStarted","Data":"f2388014e0df4991bb7d09ff069db4cbcff62f19b8f2937c7e4284dced15f8d7"} Oct 03 08:55:53 crc kubenswrapper[4899]: I1003 08:55:53.427045 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-etc-swift\") pod \"swift-storage-0\" (UID: \"a9725bfb-2b4a-49d7-b4d8-c2235583f28f\") " pod="openstack/swift-storage-0" Oct 03 08:55:53 crc kubenswrapper[4899]: I1003 08:55:53.434718 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a9725bfb-2b4a-49d7-b4d8-c2235583f28f-etc-swift\") pod \"swift-storage-0\" (UID: \"a9725bfb-2b4a-49d7-b4d8-c2235583f28f\") " pod="openstack/swift-storage-0" Oct 03 08:55:53 crc kubenswrapper[4899]: I1003 08:55:53.647089 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Oct 03 08:55:54 crc kubenswrapper[4899]: I1003 08:55:54.806113 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.017411 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-p88zw"] Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.018461 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-p88zw" Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.028228 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-p88zw"] Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.094095 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.158323 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l975n\" (UniqueName: \"kubernetes.io/projected/d60b9a27-3601-451c-8e43-4329e0bd9a78-kube-api-access-l975n\") pod \"cinder-db-create-p88zw\" (UID: \"d60b9a27-3601-451c-8e43-4329e0bd9a78\") " pod="openstack/cinder-db-create-p88zw" Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.231342 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-bb2l9"] Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.232355 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-bb2l9" Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.259731 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l975n\" (UniqueName: \"kubernetes.io/projected/d60b9a27-3601-451c-8e43-4329e0bd9a78-kube-api-access-l975n\") pod \"cinder-db-create-p88zw\" (UID: \"d60b9a27-3601-451c-8e43-4329e0bd9a78\") " pod="openstack/cinder-db-create-p88zw" Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.259822 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nw5tw\" (UniqueName: \"kubernetes.io/projected/de7dc4fe-1a51-4238-a8bf-3651a6cd28bc-kube-api-access-nw5tw\") pod \"barbican-db-create-bb2l9\" (UID: \"de7dc4fe-1a51-4238-a8bf-3651a6cd28bc\") " pod="openstack/barbican-db-create-bb2l9" Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.266665 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-bb2l9"] Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.332816 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l975n\" (UniqueName: \"kubernetes.io/projected/d60b9a27-3601-451c-8e43-4329e0bd9a78-kube-api-access-l975n\") pod \"cinder-db-create-p88zw\" (UID: \"d60b9a27-3601-451c-8e43-4329e0bd9a78\") " pod="openstack/cinder-db-create-p88zw" Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.341316 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-p88zw" Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.366959 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nw5tw\" (UniqueName: \"kubernetes.io/projected/de7dc4fe-1a51-4238-a8bf-3651a6cd28bc-kube-api-access-nw5tw\") pod \"barbican-db-create-bb2l9\" (UID: \"de7dc4fe-1a51-4238-a8bf-3651a6cd28bc\") " pod="openstack/barbican-db-create-bb2l9" Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.402108 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-sd9hr"] Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.403256 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-sd9hr" Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.409907 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nw5tw\" (UniqueName: \"kubernetes.io/projected/de7dc4fe-1a51-4238-a8bf-3651a6cd28bc-kube-api-access-nw5tw\") pod \"barbican-db-create-bb2l9\" (UID: \"de7dc4fe-1a51-4238-a8bf-3651a6cd28bc\") " pod="openstack/barbican-db-create-bb2l9" Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.414739 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-sd9hr"] Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.470052 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gjsg\" (UniqueName: \"kubernetes.io/projected/048b8d8c-402b-4ab3-bda5-847c2f8f6d9c-kube-api-access-5gjsg\") pod \"neutron-db-create-sd9hr\" (UID: \"048b8d8c-402b-4ab3-bda5-847c2f8f6d9c\") " pod="openstack/neutron-db-create-sd9hr" Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.506461 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-d5d9-account-create-xkb58" Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.536499 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-c734-account-create-f7xq6" Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.567357 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-bb2l9" Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.571206 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vhw58\" (UniqueName: \"kubernetes.io/projected/b9d2a024-e6cb-459b-baaa-98e13e6c46f2-kube-api-access-vhw58\") pod \"b9d2a024-e6cb-459b-baaa-98e13e6c46f2\" (UID: \"b9d2a024-e6cb-459b-baaa-98e13e6c46f2\") " Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.571265 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xk2l2\" (UniqueName: \"kubernetes.io/projected/2fd15087-4144-4272-9136-1537ce09cde1-kube-api-access-xk2l2\") pod \"2fd15087-4144-4272-9136-1537ce09cde1\" (UID: \"2fd15087-4144-4272-9136-1537ce09cde1\") " Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.571635 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gjsg\" (UniqueName: \"kubernetes.io/projected/048b8d8c-402b-4ab3-bda5-847c2f8f6d9c-kube-api-access-5gjsg\") pod \"neutron-db-create-sd9hr\" (UID: \"048b8d8c-402b-4ab3-bda5-847c2f8f6d9c\") " pod="openstack/neutron-db-create-sd9hr" Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.575361 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fd15087-4144-4272-9136-1537ce09cde1-kube-api-access-xk2l2" (OuterVolumeSpecName: "kube-api-access-xk2l2") pod "2fd15087-4144-4272-9136-1537ce09cde1" (UID: "2fd15087-4144-4272-9136-1537ce09cde1"). InnerVolumeSpecName "kube-api-access-xk2l2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.588064 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9d2a024-e6cb-459b-baaa-98e13e6c46f2-kube-api-access-vhw58" (OuterVolumeSpecName: "kube-api-access-vhw58") pod "b9d2a024-e6cb-459b-baaa-98e13e6c46f2" (UID: "b9d2a024-e6cb-459b-baaa-98e13e6c46f2"). 
InnerVolumeSpecName "kube-api-access-vhw58". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.591748 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gjsg\" (UniqueName: \"kubernetes.io/projected/048b8d8c-402b-4ab3-bda5-847c2f8f6d9c-kube-api-access-5gjsg\") pod \"neutron-db-create-sd9hr\" (UID: \"048b8d8c-402b-4ab3-bda5-847c2f8f6d9c\") " pod="openstack/neutron-db-create-sd9hr" Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.673214 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vhw58\" (UniqueName: \"kubernetes.io/projected/b9d2a024-e6cb-459b-baaa-98e13e6c46f2-kube-api-access-vhw58\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.673249 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xk2l2\" (UniqueName: \"kubernetes.io/projected/2fd15087-4144-4272-9136-1537ce09cde1-kube-api-access-xk2l2\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.707204 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-c734-account-create-f7xq6" event={"ID":"2fd15087-4144-4272-9136-1537ce09cde1","Type":"ContainerDied","Data":"f2388014e0df4991bb7d09ff069db4cbcff62f19b8f2937c7e4284dced15f8d7"} Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.707252 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f2388014e0df4991bb7d09ff069db4cbcff62f19b8f2937c7e4284dced15f8d7" Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.707321 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-c734-account-create-f7xq6" Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.711030 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-d5d9-account-create-xkb58" event={"ID":"b9d2a024-e6cb-459b-baaa-98e13e6c46f2","Type":"ContainerDied","Data":"82fd8c58886285b983f853253733ff5319681ab4f8af2801438d3480332e8614"} Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.711083 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="82fd8c58886285b983f853253733ff5319681ab4f8af2801438d3480332e8614" Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.711106 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-d5d9-account-create-xkb58" Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.802701 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-p88zw"] Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.813214 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Oct 03 08:55:55 crc kubenswrapper[4899]: W1003 08:55:55.823549 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda9725bfb_2b4a_49d7_b4d8_c2235583f28f.slice/crio-8d00d265bebe7328cfc5a91eb845eac7cc4cd5b207f2be8fade098db87be989d WatchSource:0}: Error finding container 8d00d265bebe7328cfc5a91eb845eac7cc4cd5b207f2be8fade098db87be989d: Status 404 returned error can't find the container with id 8d00d265bebe7328cfc5a91eb845eac7cc4cd5b207f2be8fade098db87be989d Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.848853 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-sd9hr" Oct 03 08:55:55 crc kubenswrapper[4899]: I1003 08:55:55.854622 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-bb2l9"] Oct 03 08:55:55 crc kubenswrapper[4899]: W1003 08:55:55.868514 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde7dc4fe_1a51_4238_a8bf_3651a6cd28bc.slice/crio-3d47825cd5db023470244f410b0638184cc40d42202c1a27742169fb9167ef66 WatchSource:0}: Error finding container 3d47825cd5db023470244f410b0638184cc40d42202c1a27742169fb9167ef66: Status 404 returned error can't find the container with id 3d47825cd5db023470244f410b0638184cc40d42202c1a27742169fb9167ef66 Oct 03 08:55:56 crc kubenswrapper[4899]: I1003 08:55:56.312696 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-sd9hr"] Oct 03 08:55:56 crc kubenswrapper[4899]: I1003 08:55:56.726139 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-r7dz5" event={"ID":"e95726c5-31b4-47eb-9e32-eec52266c460","Type":"ContainerStarted","Data":"dc1d8f34db4c41a1dd1cf1d211a3c94f4a202e7eecb4b65e5c12c447b61e0f8a"} Oct 03 08:55:56 crc kubenswrapper[4899]: I1003 08:55:56.733770 4899 generic.go:334] "Generic (PLEG): container finished" podID="d60b9a27-3601-451c-8e43-4329e0bd9a78" containerID="dca5f89608b5bf5fccc25bf3d0544bcc7822101647485fe404a3c913d668c87c" exitCode=0 Oct 03 08:55:56 crc kubenswrapper[4899]: I1003 08:55:56.733881 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-p88zw" event={"ID":"d60b9a27-3601-451c-8e43-4329e0bd9a78","Type":"ContainerDied","Data":"dca5f89608b5bf5fccc25bf3d0544bcc7822101647485fe404a3c913d668c87c"} Oct 03 08:55:56 crc kubenswrapper[4899]: I1003 08:55:56.733930 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-p88zw" event={"ID":"d60b9a27-3601-451c-8e43-4329e0bd9a78","Type":"ContainerStarted","Data":"6a068791ef558742414450408ee05004fa8f17e4afa686b2311860d117fc94dc"} Oct 03 08:55:56 crc kubenswrapper[4899]: I1003 08:55:56.739183 4899 generic.go:334] "Generic (PLEG): container finished" podID="048b8d8c-402b-4ab3-bda5-847c2f8f6d9c" containerID="b5977bbe81aaf40a87a557eaf4e8f2f6ceb859ac2843377a8ca4d677cd269bd4" exitCode=0 Oct 03 08:55:56 crc kubenswrapper[4899]: I1003 08:55:56.739242 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-sd9hr" event={"ID":"048b8d8c-402b-4ab3-bda5-847c2f8f6d9c","Type":"ContainerDied","Data":"b5977bbe81aaf40a87a557eaf4e8f2f6ceb859ac2843377a8ca4d677cd269bd4"} Oct 03 08:55:56 crc kubenswrapper[4899]: I1003 08:55:56.739266 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-sd9hr" event={"ID":"048b8d8c-402b-4ab3-bda5-847c2f8f6d9c","Type":"ContainerStarted","Data":"54b870db09b10b032ace4d673dcf1c21a5dbb6d90348956bc35ec6dd567bd9f3"} Oct 03 08:55:56 crc kubenswrapper[4899]: I1003 08:55:56.741729 4899 generic.go:334] "Generic (PLEG): container finished" podID="de7dc4fe-1a51-4238-a8bf-3651a6cd28bc" containerID="6c7947c84c25547b543e8e74b8151e5204407a9b2c25ee9645290127cee1a8fe" exitCode=0 Oct 03 08:55:56 crc kubenswrapper[4899]: I1003 08:55:56.741785 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-bb2l9" event={"ID":"de7dc4fe-1a51-4238-a8bf-3651a6cd28bc","Type":"ContainerDied","Data":"6c7947c84c25547b543e8e74b8151e5204407a9b2c25ee9645290127cee1a8fe"} Oct 03 
08:55:56 crc kubenswrapper[4899]: I1003 08:55:56.741810 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-bb2l9" event={"ID":"de7dc4fe-1a51-4238-a8bf-3651a6cd28bc","Type":"ContainerStarted","Data":"3d47825cd5db023470244f410b0638184cc40d42202c1a27742169fb9167ef66"} Oct 03 08:55:56 crc kubenswrapper[4899]: I1003 08:55:56.742399 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-r7dz5" podStartSLOduration=3.073460104 podStartE2EDuration="13.742390009s" podCreationTimestamp="2025-10-03 08:55:43 +0000 UTC" firstStartedPulling="2025-10-03 08:55:44.752863241 +0000 UTC m=+918.860348184" lastFinishedPulling="2025-10-03 08:55:55.421793136 +0000 UTC m=+929.529278089" observedRunningTime="2025-10-03 08:55:56.740777829 +0000 UTC m=+930.848262782" watchObservedRunningTime="2025-10-03 08:55:56.742390009 +0000 UTC m=+930.849874962" Oct 03 08:55:56 crc kubenswrapper[4899]: I1003 08:55:56.744759 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a9725bfb-2b4a-49d7-b4d8-c2235583f28f","Type":"ContainerStarted","Data":"8d00d265bebe7328cfc5a91eb845eac7cc4cd5b207f2be8fade098db87be989d"} Oct 03 08:55:57 crc kubenswrapper[4899]: I1003 08:55:57.758766 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a9725bfb-2b4a-49d7-b4d8-c2235583f28f","Type":"ContainerStarted","Data":"730ad6b8f2c2f1f12c81dc86b0ee0f8d19d1c0c30627b3dd287e96774d6ff0a6"} Oct 03 08:55:57 crc kubenswrapper[4899]: I1003 08:55:57.759303 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a9725bfb-2b4a-49d7-b4d8-c2235583f28f","Type":"ContainerStarted","Data":"086384e3b3865cf616a0a918c63409d074b185056059b34954cb3eba0a79afeb"} Oct 03 08:55:57 crc kubenswrapper[4899]: I1003 08:55:57.759314 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a9725bfb-2b4a-49d7-b4d8-c2235583f28f","Type":"ContainerStarted","Data":"f964766f7eb0d6f1ba4d11f3140788f480f1e91e79aab864b095d01bb48935f5"} Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.116959 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-p88zw" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.169870 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-sd9hr" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.194985 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-bb2l9" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.239021 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nw5tw\" (UniqueName: \"kubernetes.io/projected/de7dc4fe-1a51-4238-a8bf-3651a6cd28bc-kube-api-access-nw5tw\") pod \"de7dc4fe-1a51-4238-a8bf-3651a6cd28bc\" (UID: \"de7dc4fe-1a51-4238-a8bf-3651a6cd28bc\") " Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.239076 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5gjsg\" (UniqueName: \"kubernetes.io/projected/048b8d8c-402b-4ab3-bda5-847c2f8f6d9c-kube-api-access-5gjsg\") pod \"048b8d8c-402b-4ab3-bda5-847c2f8f6d9c\" (UID: \"048b8d8c-402b-4ab3-bda5-847c2f8f6d9c\") " Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.239247 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l975n\" (UniqueName: \"kubernetes.io/projected/d60b9a27-3601-451c-8e43-4329e0bd9a78-kube-api-access-l975n\") pod \"d60b9a27-3601-451c-8e43-4329e0bd9a78\" (UID: \"d60b9a27-3601-451c-8e43-4329e0bd9a78\") " Oct 03 08:55:58 crc kubenswrapper[4899]: E1003 08:55:58.241061 4899 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.129.56.217:42648->38.129.56.217:44793: write tcp 38.129.56.217:42648->38.129.56.217:44793: write: broken pipe Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.250871 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de7dc4fe-1a51-4238-a8bf-3651a6cd28bc-kube-api-access-nw5tw" (OuterVolumeSpecName: "kube-api-access-nw5tw") pod "de7dc4fe-1a51-4238-a8bf-3651a6cd28bc" (UID: "de7dc4fe-1a51-4238-a8bf-3651a6cd28bc"). InnerVolumeSpecName "kube-api-access-nw5tw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.253724 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/048b8d8c-402b-4ab3-bda5-847c2f8f6d9c-kube-api-access-5gjsg" (OuterVolumeSpecName: "kube-api-access-5gjsg") pod "048b8d8c-402b-4ab3-bda5-847c2f8f6d9c" (UID: "048b8d8c-402b-4ab3-bda5-847c2f8f6d9c"). InnerVolumeSpecName "kube-api-access-5gjsg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.254457 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d60b9a27-3601-451c-8e43-4329e0bd9a78-kube-api-access-l975n" (OuterVolumeSpecName: "kube-api-access-l975n") pod "d60b9a27-3601-451c-8e43-4329e0bd9a78" (UID: "d60b9a27-3601-451c-8e43-4329e0bd9a78"). InnerVolumeSpecName "kube-api-access-l975n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.341241 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nw5tw\" (UniqueName: \"kubernetes.io/projected/de7dc4fe-1a51-4238-a8bf-3651a6cd28bc-kube-api-access-nw5tw\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.341272 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5gjsg\" (UniqueName: \"kubernetes.io/projected/048b8d8c-402b-4ab3-bda5-847c2f8f6d9c-kube-api-access-5gjsg\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.341281 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l975n\" (UniqueName: \"kubernetes.io/projected/d60b9a27-3601-451c-8e43-4329e0bd9a78-kube-api-access-l975n\") on node \"crc\" DevicePath \"\"" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.703072 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-cff2w"] Oct 03 08:55:58 crc kubenswrapper[4899]: E1003 08:55:58.703482 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d60b9a27-3601-451c-8e43-4329e0bd9a78" containerName="mariadb-database-create" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.703502 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="d60b9a27-3601-451c-8e43-4329e0bd9a78" containerName="mariadb-database-create" Oct 03 08:55:58 crc kubenswrapper[4899]: E1003 08:55:58.703529 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9d2a024-e6cb-459b-baaa-98e13e6c46f2" containerName="mariadb-account-create" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.703537 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9d2a024-e6cb-459b-baaa-98e13e6c46f2" containerName="mariadb-account-create" Oct 03 08:55:58 crc kubenswrapper[4899]: E1003 08:55:58.703551 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de7dc4fe-1a51-4238-a8bf-3651a6cd28bc" containerName="mariadb-database-create" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.703560 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="de7dc4fe-1a51-4238-a8bf-3651a6cd28bc" containerName="mariadb-database-create" Oct 03 08:55:58 crc kubenswrapper[4899]: E1003 08:55:58.703577 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fd15087-4144-4272-9136-1537ce09cde1" containerName="mariadb-account-create" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.703584 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fd15087-4144-4272-9136-1537ce09cde1" containerName="mariadb-account-create" Oct 03 08:55:58 crc kubenswrapper[4899]: E1003 08:55:58.703605 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="048b8d8c-402b-4ab3-bda5-847c2f8f6d9c" containerName="mariadb-database-create" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.703614 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="048b8d8c-402b-4ab3-bda5-847c2f8f6d9c" containerName="mariadb-database-create" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.703792 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="048b8d8c-402b-4ab3-bda5-847c2f8f6d9c" containerName="mariadb-database-create" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.703835 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="d60b9a27-3601-451c-8e43-4329e0bd9a78" containerName="mariadb-database-create" Oct 03 
08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.703855 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9d2a024-e6cb-459b-baaa-98e13e6c46f2" containerName="mariadb-account-create" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.703872 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fd15087-4144-4272-9136-1537ce09cde1" containerName="mariadb-account-create" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.703887 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="de7dc4fe-1a51-4238-a8bf-3651a6cd28bc" containerName="mariadb-database-create" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.704523 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-cff2w" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.706671 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-6hp7k" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.706946 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.707090 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.707206 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.717156 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-cff2w"] Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.747323 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237108cb-238e-4abc-a099-60c1ce478004-combined-ca-bundle\") pod \"keystone-db-sync-cff2w\" (UID: \"237108cb-238e-4abc-a099-60c1ce478004\") " pod="openstack/keystone-db-sync-cff2w" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.747369 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52fmt\" (UniqueName: \"kubernetes.io/projected/237108cb-238e-4abc-a099-60c1ce478004-kube-api-access-52fmt\") pod \"keystone-db-sync-cff2w\" (UID: \"237108cb-238e-4abc-a099-60c1ce478004\") " pod="openstack/keystone-db-sync-cff2w" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.747458 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/237108cb-238e-4abc-a099-60c1ce478004-config-data\") pod \"keystone-db-sync-cff2w\" (UID: \"237108cb-238e-4abc-a099-60c1ce478004\") " pod="openstack/keystone-db-sync-cff2w" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.766658 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-bb2l9" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.766822 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-bb2l9" event={"ID":"de7dc4fe-1a51-4238-a8bf-3651a6cd28bc","Type":"ContainerDied","Data":"3d47825cd5db023470244f410b0638184cc40d42202c1a27742169fb9167ef66"} Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.766859 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3d47825cd5db023470244f410b0638184cc40d42202c1a27742169fb9167ef66" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.769364 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a9725bfb-2b4a-49d7-b4d8-c2235583f28f","Type":"ContainerStarted","Data":"459f09653e2501c6345bf1f01d366e41d285b589accaa4fea1c8c869dadfd674"} Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.771952 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-p88zw" event={"ID":"d60b9a27-3601-451c-8e43-4329e0bd9a78","Type":"ContainerDied","Data":"6a068791ef558742414450408ee05004fa8f17e4afa686b2311860d117fc94dc"} Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.771979 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a068791ef558742414450408ee05004fa8f17e4afa686b2311860d117fc94dc" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.772015 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-p88zw" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.773552 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-sd9hr" event={"ID":"048b8d8c-402b-4ab3-bda5-847c2f8f6d9c","Type":"ContainerDied","Data":"54b870db09b10b032ace4d673dcf1c21a5dbb6d90348956bc35ec6dd567bd9f3"} Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.773584 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="54b870db09b10b032ace4d673dcf1c21a5dbb6d90348956bc35ec6dd567bd9f3" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.773604 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-sd9hr" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.849798 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/237108cb-238e-4abc-a099-60c1ce478004-config-data\") pod \"keystone-db-sync-cff2w\" (UID: \"237108cb-238e-4abc-a099-60c1ce478004\") " pod="openstack/keystone-db-sync-cff2w" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.849962 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237108cb-238e-4abc-a099-60c1ce478004-combined-ca-bundle\") pod \"keystone-db-sync-cff2w\" (UID: \"237108cb-238e-4abc-a099-60c1ce478004\") " pod="openstack/keystone-db-sync-cff2w" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.850001 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52fmt\" (UniqueName: \"kubernetes.io/projected/237108cb-238e-4abc-a099-60c1ce478004-kube-api-access-52fmt\") pod \"keystone-db-sync-cff2w\" (UID: \"237108cb-238e-4abc-a099-60c1ce478004\") " pod="openstack/keystone-db-sync-cff2w" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.856265 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237108cb-238e-4abc-a099-60c1ce478004-combined-ca-bundle\") pod \"keystone-db-sync-cff2w\" (UID: \"237108cb-238e-4abc-a099-60c1ce478004\") " pod="openstack/keystone-db-sync-cff2w" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.856474 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/237108cb-238e-4abc-a099-60c1ce478004-config-data\") pod \"keystone-db-sync-cff2w\" (UID: \"237108cb-238e-4abc-a099-60c1ce478004\") " pod="openstack/keystone-db-sync-cff2w" Oct 03 08:55:58 crc kubenswrapper[4899]: I1003 08:55:58.868650 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52fmt\" (UniqueName: \"kubernetes.io/projected/237108cb-238e-4abc-a099-60c1ce478004-kube-api-access-52fmt\") pod \"keystone-db-sync-cff2w\" (UID: \"237108cb-238e-4abc-a099-60c1ce478004\") " pod="openstack/keystone-db-sync-cff2w" Oct 03 08:55:59 crc kubenswrapper[4899]: I1003 08:55:59.040725 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-cff2w" Oct 03 08:55:59 crc kubenswrapper[4899]: I1003 08:55:59.465323 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-cff2w"] Oct 03 08:55:59 crc kubenswrapper[4899]: W1003 08:55:59.467274 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod237108cb_238e_4abc_a099_60c1ce478004.slice/crio-676bdab4dc49e3287fafe108204f1c99a6520c10e4067df0a2e13ece53f427c9 WatchSource:0}: Error finding container 676bdab4dc49e3287fafe108204f1c99a6520c10e4067df0a2e13ece53f427c9: Status 404 returned error can't find the container with id 676bdab4dc49e3287fafe108204f1c99a6520c10e4067df0a2e13ece53f427c9 Oct 03 08:55:59 crc kubenswrapper[4899]: I1003 08:55:59.782191 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-cff2w" event={"ID":"237108cb-238e-4abc-a099-60c1ce478004","Type":"ContainerStarted","Data":"676bdab4dc49e3287fafe108204f1c99a6520c10e4067df0a2e13ece53f427c9"} Oct 03 08:56:01 crc kubenswrapper[4899]: I1003 08:56:01.804082 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a9725bfb-2b4a-49d7-b4d8-c2235583f28f","Type":"ContainerStarted","Data":"676f130b82a6d18224d2c4af66d90fad4f73c16cbc0d577e35062a85115cc13f"} Oct 03 08:56:01 crc kubenswrapper[4899]: I1003 08:56:01.804461 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a9725bfb-2b4a-49d7-b4d8-c2235583f28f","Type":"ContainerStarted","Data":"2ecab78a336af152e6b47564f9463d0033d80ef3c6386feb2cac76a46d564888"} Oct 03 08:56:02 crc kubenswrapper[4899]: I1003 08:56:02.815036 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a9725bfb-2b4a-49d7-b4d8-c2235583f28f","Type":"ContainerStarted","Data":"ada40daf5f52cf55070780d78c8588c55ce00eceec0cd5ab29104820dc7e6095"} Oct 03 08:56:02 crc kubenswrapper[4899]: I1003 08:56:02.815098 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a9725bfb-2b4a-49d7-b4d8-c2235583f28f","Type":"ContainerStarted","Data":"b07e53fb3e98e2f2238a7165e076460a9c95e5706a190fbf6b2acd518d21e4df"} Oct 03 08:56:04 crc kubenswrapper[4899]: I1003 08:56:04.833246 4899 generic.go:334] "Generic (PLEG): container finished" podID="e95726c5-31b4-47eb-9e32-eec52266c460" containerID="dc1d8f34db4c41a1dd1cf1d211a3c94f4a202e7eecb4b65e5c12c447b61e0f8a" exitCode=0 Oct 03 08:56:04 crc kubenswrapper[4899]: I1003 08:56:04.834554 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-r7dz5" event={"ID":"e95726c5-31b4-47eb-9e32-eec52266c460","Type":"ContainerDied","Data":"dc1d8f34db4c41a1dd1cf1d211a3c94f4a202e7eecb4b65e5c12c447b61e0f8a"} Oct 03 08:56:04 crc kubenswrapper[4899]: I1003 08:56:04.845370 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-cff2w" event={"ID":"237108cb-238e-4abc-a099-60c1ce478004","Type":"ContainerStarted","Data":"7e4717f08297f6e535ff8f209cebedd1559de0f9904ec33aa861120513b406e0"} Oct 03 08:56:04 crc kubenswrapper[4899]: I1003 08:56:04.875397 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-cff2w" podStartSLOduration=2.015463577 podStartE2EDuration="6.875377504s" podCreationTimestamp="2025-10-03 08:55:58 +0000 UTC" firstStartedPulling="2025-10-03 08:55:59.469395888 +0000 UTC m=+933.576880841" lastFinishedPulling="2025-10-03 
08:56:04.329309815 +0000 UTC m=+938.436794768" observedRunningTime="2025-10-03 08:56:04.871962266 +0000 UTC m=+938.979447219" watchObservedRunningTime="2025-10-03 08:56:04.875377504 +0000 UTC m=+938.982862457" Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.290525 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-581b-account-create-tbmqr"] Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.291741 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-581b-account-create-tbmqr" Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.294129 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.303809 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-581b-account-create-tbmqr"] Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.360559 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lflz\" (UniqueName: \"kubernetes.io/projected/9a47a104-3fdb-4f24-8fd4-ec9b8b6dca8a-kube-api-access-6lflz\") pod \"cinder-581b-account-create-tbmqr\" (UID: \"9a47a104-3fdb-4f24-8fd4-ec9b8b6dca8a\") " pod="openstack/cinder-581b-account-create-tbmqr" Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.462600 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lflz\" (UniqueName: \"kubernetes.io/projected/9a47a104-3fdb-4f24-8fd4-ec9b8b6dca8a-kube-api-access-6lflz\") pod \"cinder-581b-account-create-tbmqr\" (UID: \"9a47a104-3fdb-4f24-8fd4-ec9b8b6dca8a\") " pod="openstack/cinder-581b-account-create-tbmqr" Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.486864 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-1c09-account-create-qnsmw"] Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.490920 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-1c09-account-create-qnsmw" Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.493303 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.499546 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-1c09-account-create-qnsmw"] Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.501421 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lflz\" (UniqueName: \"kubernetes.io/projected/9a47a104-3fdb-4f24-8fd4-ec9b8b6dca8a-kube-api-access-6lflz\") pod \"cinder-581b-account-create-tbmqr\" (UID: \"9a47a104-3fdb-4f24-8fd4-ec9b8b6dca8a\") " pod="openstack/cinder-581b-account-create-tbmqr" Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.564473 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fsz4\" (UniqueName: \"kubernetes.io/projected/d9905523-1414-4f5e-958d-22ffc752c061-kube-api-access-2fsz4\") pod \"barbican-1c09-account-create-qnsmw\" (UID: \"d9905523-1414-4f5e-958d-22ffc752c061\") " pod="openstack/barbican-1c09-account-create-qnsmw" Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.590261 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-10c1-account-create-jgc9k"] Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.592923 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-10c1-account-create-jgc9k" Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.595043 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.611015 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-10c1-account-create-jgc9k"] Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.624068 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-581b-account-create-tbmqr" Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.670783 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckj75\" (UniqueName: \"kubernetes.io/projected/a7891f02-e1ab-4c31-a210-971c66974961-kube-api-access-ckj75\") pod \"neutron-10c1-account-create-jgc9k\" (UID: \"a7891f02-e1ab-4c31-a210-971c66974961\") " pod="openstack/neutron-10c1-account-create-jgc9k" Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.670859 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fsz4\" (UniqueName: \"kubernetes.io/projected/d9905523-1414-4f5e-958d-22ffc752c061-kube-api-access-2fsz4\") pod \"barbican-1c09-account-create-qnsmw\" (UID: \"d9905523-1414-4f5e-958d-22ffc752c061\") " pod="openstack/barbican-1c09-account-create-qnsmw" Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.689368 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fsz4\" (UniqueName: \"kubernetes.io/projected/d9905523-1414-4f5e-958d-22ffc752c061-kube-api-access-2fsz4\") pod \"barbican-1c09-account-create-qnsmw\" (UID: \"d9905523-1414-4f5e-958d-22ffc752c061\") " pod="openstack/barbican-1c09-account-create-qnsmw" Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.771991 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckj75\" (UniqueName: \"kubernetes.io/projected/a7891f02-e1ab-4c31-a210-971c66974961-kube-api-access-ckj75\") pod \"neutron-10c1-account-create-jgc9k\" (UID: \"a7891f02-e1ab-4c31-a210-971c66974961\") " pod="openstack/neutron-10c1-account-create-jgc9k" Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.793571 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckj75\" (UniqueName: \"kubernetes.io/projected/a7891f02-e1ab-4c31-a210-971c66974961-kube-api-access-ckj75\") pod \"neutron-10c1-account-create-jgc9k\" (UID: \"a7891f02-e1ab-4c31-a210-971c66974961\") " pod="openstack/neutron-10c1-account-create-jgc9k" Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.887806 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-1c09-account-create-qnsmw" Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.895197 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a9725bfb-2b4a-49d7-b4d8-c2235583f28f","Type":"ContainerStarted","Data":"59cd06647f112df11dc9a09a39734e8061da92bb0a7f154ae8ae559c4a1254ae"} Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.895250 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a9725bfb-2b4a-49d7-b4d8-c2235583f28f","Type":"ContainerStarted","Data":"0be0cb045d965f880f47e9396f8a154a31dee058f9907d5aa551eed730b7c32d"} Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.895263 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a9725bfb-2b4a-49d7-b4d8-c2235583f28f","Type":"ContainerStarted","Data":"d7eec3bc4bee56da55affa94b1b794cbbcb13d62717d422fa4aec1b62db8a775"} Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.895275 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a9725bfb-2b4a-49d7-b4d8-c2235583f28f","Type":"ContainerStarted","Data":"d9d3e2c715f97a6f8e5674be4a778b34de8587a7e21d32fa6e6daf8851e5a6e1"} Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.895297 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a9725bfb-2b4a-49d7-b4d8-c2235583f28f","Type":"ContainerStarted","Data":"928b71cffcc16fa8f998156e1bc2862445873f61681bee7493bfb3d2b9342441"} Oct 03 08:56:05 crc kubenswrapper[4899]: I1003 08:56:05.923445 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-10c1-account-create-jgc9k" Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.161831 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-581b-account-create-tbmqr"] Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.262437 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-r7dz5" Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.284047 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e95726c5-31b4-47eb-9e32-eec52266c460-config-data\") pod \"e95726c5-31b4-47eb-9e32-eec52266c460\" (UID: \"e95726c5-31b4-47eb-9e32-eec52266c460\") " Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.284194 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e95726c5-31b4-47eb-9e32-eec52266c460-combined-ca-bundle\") pod \"e95726c5-31b4-47eb-9e32-eec52266c460\" (UID: \"e95726c5-31b4-47eb-9e32-eec52266c460\") " Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.284222 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hwmd9\" (UniqueName: \"kubernetes.io/projected/e95726c5-31b4-47eb-9e32-eec52266c460-kube-api-access-hwmd9\") pod \"e95726c5-31b4-47eb-9e32-eec52266c460\" (UID: \"e95726c5-31b4-47eb-9e32-eec52266c460\") " Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.284301 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e95726c5-31b4-47eb-9e32-eec52266c460-db-sync-config-data\") pod \"e95726c5-31b4-47eb-9e32-eec52266c460\" (UID: \"e95726c5-31b4-47eb-9e32-eec52266c460\") " Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.290617 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e95726c5-31b4-47eb-9e32-eec52266c460-kube-api-access-hwmd9" (OuterVolumeSpecName: "kube-api-access-hwmd9") pod "e95726c5-31b4-47eb-9e32-eec52266c460" (UID: "e95726c5-31b4-47eb-9e32-eec52266c460"). InnerVolumeSpecName "kube-api-access-hwmd9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.308053 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e95726c5-31b4-47eb-9e32-eec52266c460-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "e95726c5-31b4-47eb-9e32-eec52266c460" (UID: "e95726c5-31b4-47eb-9e32-eec52266c460"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.333124 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e95726c5-31b4-47eb-9e32-eec52266c460-config-data" (OuterVolumeSpecName: "config-data") pod "e95726c5-31b4-47eb-9e32-eec52266c460" (UID: "e95726c5-31b4-47eb-9e32-eec52266c460"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.334207 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-1c09-account-create-qnsmw"] Oct 03 08:56:06 crc kubenswrapper[4899]: W1003 08:56:06.337802 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd9905523_1414_4f5e_958d_22ffc752c061.slice/crio-01337fa7ca8c30a619a7498eff3ed39a8bceeb30f64d245b64926d8cf68b299e WatchSource:0}: Error finding container 01337fa7ca8c30a619a7498eff3ed39a8bceeb30f64d245b64926d8cf68b299e: Status 404 returned error can't find the container with id 01337fa7ca8c30a619a7498eff3ed39a8bceeb30f64d245b64926d8cf68b299e Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.338209 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e95726c5-31b4-47eb-9e32-eec52266c460-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e95726c5-31b4-47eb-9e32-eec52266c460" (UID: "e95726c5-31b4-47eb-9e32-eec52266c460"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.386504 4899 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e95726c5-31b4-47eb-9e32-eec52266c460-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.386541 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e95726c5-31b4-47eb-9e32-eec52266c460-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.386552 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e95726c5-31b4-47eb-9e32-eec52266c460-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.386560 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hwmd9\" (UniqueName: \"kubernetes.io/projected/e95726c5-31b4-47eb-9e32-eec52266c460-kube-api-access-hwmd9\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.486404 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-10c1-account-create-jgc9k"] Oct 03 08:56:06 crc kubenswrapper[4899]: W1003 08:56:06.510645 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda7891f02_e1ab_4c31_a210_971c66974961.slice/crio-d7e6f8ddcff503295cf3ea7ad14d0980f766769568852786e625363581a22a71 WatchSource:0}: Error finding container d7e6f8ddcff503295cf3ea7ad14d0980f766769568852786e625363581a22a71: Status 404 returned error can't find the container with id d7e6f8ddcff503295cf3ea7ad14d0980f766769568852786e625363581a22a71 Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.902847 4899 generic.go:334] "Generic (PLEG): container finished" podID="a7891f02-e1ab-4c31-a210-971c66974961" containerID="c9168264cd0eb8375208d94c953dccc869df245f6de94d6190e4a0d6241d6419" exitCode=0 Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.903141 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-10c1-account-create-jgc9k" 
event={"ID":"a7891f02-e1ab-4c31-a210-971c66974961","Type":"ContainerDied","Data":"c9168264cd0eb8375208d94c953dccc869df245f6de94d6190e4a0d6241d6419"} Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.903171 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-10c1-account-create-jgc9k" event={"ID":"a7891f02-e1ab-4c31-a210-971c66974961","Type":"ContainerStarted","Data":"d7e6f8ddcff503295cf3ea7ad14d0980f766769568852786e625363581a22a71"} Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.914468 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a9725bfb-2b4a-49d7-b4d8-c2235583f28f","Type":"ContainerStarted","Data":"4623ab1f30e222d5986bae5dd3f50cd81bb00d643268224cfbaa8d20d41a2dfd"} Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.914521 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a9725bfb-2b4a-49d7-b4d8-c2235583f28f","Type":"ContainerStarted","Data":"885fc379ccf6e7f0f644329da2f8f6d0aa87903582c8429ee924ba08274fff60"} Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.920980 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-r7dz5" event={"ID":"e95726c5-31b4-47eb-9e32-eec52266c460","Type":"ContainerDied","Data":"d14a983f481fac5026a25f2d14d022843c652f8683d4aed7dc370d8eefd424b3"} Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.921027 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d14a983f481fac5026a25f2d14d022843c652f8683d4aed7dc370d8eefd424b3" Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.921099 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-r7dz5" Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.923826 4899 generic.go:334] "Generic (PLEG): container finished" podID="9a47a104-3fdb-4f24-8fd4-ec9b8b6dca8a" containerID="0b2ba05d043d16e2df9c8b68595e454897d7238e0886fb07a1990d80402ce575" exitCode=0 Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.923923 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-581b-account-create-tbmqr" event={"ID":"9a47a104-3fdb-4f24-8fd4-ec9b8b6dca8a","Type":"ContainerDied","Data":"0b2ba05d043d16e2df9c8b68595e454897d7238e0886fb07a1990d80402ce575"} Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.923956 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-581b-account-create-tbmqr" event={"ID":"9a47a104-3fdb-4f24-8fd4-ec9b8b6dca8a","Type":"ContainerStarted","Data":"2c2b9e2577f925928ff121dfeae4e39b75626314df52e48723c31ff30e99dcda"} Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.928560 4899 generic.go:334] "Generic (PLEG): container finished" podID="d9905523-1414-4f5e-958d-22ffc752c061" containerID="861aaae4a6a7eb9b804be54e689589866de6ddbf6702b0e8991640380ed7a9aa" exitCode=0 Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.928597 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-1c09-account-create-qnsmw" event={"ID":"d9905523-1414-4f5e-958d-22ffc752c061","Type":"ContainerDied","Data":"861aaae4a6a7eb9b804be54e689589866de6ddbf6702b0e8991640380ed7a9aa"} Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.928616 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-1c09-account-create-qnsmw" 
event={"ID":"d9905523-1414-4f5e-958d-22ffc752c061","Type":"ContainerStarted","Data":"01337fa7ca8c30a619a7498eff3ed39a8bceeb30f64d245b64926d8cf68b299e"} Oct 03 08:56:06 crc kubenswrapper[4899]: I1003 08:56:06.965816 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=38.030144949 podStartE2EDuration="46.965792043s" podCreationTimestamp="2025-10-03 08:55:20 +0000 UTC" firstStartedPulling="2025-10-03 08:55:55.836223969 +0000 UTC m=+929.943708922" lastFinishedPulling="2025-10-03 08:56:04.771871063 +0000 UTC m=+938.879356016" observedRunningTime="2025-10-03 08:56:06.952734861 +0000 UTC m=+941.060219824" watchObservedRunningTime="2025-10-03 08:56:06.965792043 +0000 UTC m=+941.073276996" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.211150 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74dc88fc-qcsh2"] Oct 03 08:56:07 crc kubenswrapper[4899]: E1003 08:56:07.211594 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e95726c5-31b4-47eb-9e32-eec52266c460" containerName="glance-db-sync" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.211611 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="e95726c5-31b4-47eb-9e32-eec52266c460" containerName="glance-db-sync" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.211787 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="e95726c5-31b4-47eb-9e32-eec52266c460" containerName="glance-db-sync" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.212771 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74dc88fc-qcsh2" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.245017 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74dc88fc-qcsh2"] Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.321762 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/639ed76f-a794-4e2f-b1ab-7096abf1ba58-ovsdbserver-nb\") pod \"dnsmasq-dns-74dc88fc-qcsh2\" (UID: \"639ed76f-a794-4e2f-b1ab-7096abf1ba58\") " pod="openstack/dnsmasq-dns-74dc88fc-qcsh2" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.321824 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/639ed76f-a794-4e2f-b1ab-7096abf1ba58-dns-svc\") pod \"dnsmasq-dns-74dc88fc-qcsh2\" (UID: \"639ed76f-a794-4e2f-b1ab-7096abf1ba58\") " pod="openstack/dnsmasq-dns-74dc88fc-qcsh2" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.321847 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhr89\" (UniqueName: \"kubernetes.io/projected/639ed76f-a794-4e2f-b1ab-7096abf1ba58-kube-api-access-fhr89\") pod \"dnsmasq-dns-74dc88fc-qcsh2\" (UID: \"639ed76f-a794-4e2f-b1ab-7096abf1ba58\") " pod="openstack/dnsmasq-dns-74dc88fc-qcsh2" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.321924 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/639ed76f-a794-4e2f-b1ab-7096abf1ba58-config\") pod \"dnsmasq-dns-74dc88fc-qcsh2\" (UID: \"639ed76f-a794-4e2f-b1ab-7096abf1ba58\") " pod="openstack/dnsmasq-dns-74dc88fc-qcsh2" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.321996 4899 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/639ed76f-a794-4e2f-b1ab-7096abf1ba58-ovsdbserver-sb\") pod \"dnsmasq-dns-74dc88fc-qcsh2\" (UID: \"639ed76f-a794-4e2f-b1ab-7096abf1ba58\") " pod="openstack/dnsmasq-dns-74dc88fc-qcsh2" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.388563 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74dc88fc-qcsh2"] Oct 03 08:56:07 crc kubenswrapper[4899]: E1003 08:56:07.389199 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc kube-api-access-fhr89 ovsdbserver-nb ovsdbserver-sb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-74dc88fc-qcsh2" podUID="639ed76f-a794-4e2f-b1ab-7096abf1ba58" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.424336 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/639ed76f-a794-4e2f-b1ab-7096abf1ba58-ovsdbserver-nb\") pod \"dnsmasq-dns-74dc88fc-qcsh2\" (UID: \"639ed76f-a794-4e2f-b1ab-7096abf1ba58\") " pod="openstack/dnsmasq-dns-74dc88fc-qcsh2" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.424414 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/639ed76f-a794-4e2f-b1ab-7096abf1ba58-dns-svc\") pod \"dnsmasq-dns-74dc88fc-qcsh2\" (UID: \"639ed76f-a794-4e2f-b1ab-7096abf1ba58\") " pod="openstack/dnsmasq-dns-74dc88fc-qcsh2" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.424451 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhr89\" (UniqueName: \"kubernetes.io/projected/639ed76f-a794-4e2f-b1ab-7096abf1ba58-kube-api-access-fhr89\") pod \"dnsmasq-dns-74dc88fc-qcsh2\" (UID: \"639ed76f-a794-4e2f-b1ab-7096abf1ba58\") " pod="openstack/dnsmasq-dns-74dc88fc-qcsh2" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.424525 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/639ed76f-a794-4e2f-b1ab-7096abf1ba58-config\") pod \"dnsmasq-dns-74dc88fc-qcsh2\" (UID: \"639ed76f-a794-4e2f-b1ab-7096abf1ba58\") " pod="openstack/dnsmasq-dns-74dc88fc-qcsh2" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.424633 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/639ed76f-a794-4e2f-b1ab-7096abf1ba58-ovsdbserver-sb\") pod \"dnsmasq-dns-74dc88fc-qcsh2\" (UID: \"639ed76f-a794-4e2f-b1ab-7096abf1ba58\") " pod="openstack/dnsmasq-dns-74dc88fc-qcsh2" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.425479 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/639ed76f-a794-4e2f-b1ab-7096abf1ba58-config\") pod \"dnsmasq-dns-74dc88fc-qcsh2\" (UID: \"639ed76f-a794-4e2f-b1ab-7096abf1ba58\") " pod="openstack/dnsmasq-dns-74dc88fc-qcsh2" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.425515 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/639ed76f-a794-4e2f-b1ab-7096abf1ba58-ovsdbserver-nb\") pod \"dnsmasq-dns-74dc88fc-qcsh2\" (UID: \"639ed76f-a794-4e2f-b1ab-7096abf1ba58\") " pod="openstack/dnsmasq-dns-74dc88fc-qcsh2" Oct 03 08:56:07 crc 
kubenswrapper[4899]: I1003 08:56:07.425578 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/639ed76f-a794-4e2f-b1ab-7096abf1ba58-ovsdbserver-sb\") pod \"dnsmasq-dns-74dc88fc-qcsh2\" (UID: \"639ed76f-a794-4e2f-b1ab-7096abf1ba58\") " pod="openstack/dnsmasq-dns-74dc88fc-qcsh2" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.426015 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/639ed76f-a794-4e2f-b1ab-7096abf1ba58-dns-svc\") pod \"dnsmasq-dns-74dc88fc-qcsh2\" (UID: \"639ed76f-a794-4e2f-b1ab-7096abf1ba58\") " pod="openstack/dnsmasq-dns-74dc88fc-qcsh2" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.440370 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-zrlh8"] Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.441711 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.443871 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.477022 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhr89\" (UniqueName: \"kubernetes.io/projected/639ed76f-a794-4e2f-b1ab-7096abf1ba58-kube-api-access-fhr89\") pod \"dnsmasq-dns-74dc88fc-qcsh2\" (UID: \"639ed76f-a794-4e2f-b1ab-7096abf1ba58\") " pod="openstack/dnsmasq-dns-74dc88fc-qcsh2" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.528316 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-dns-swift-storage-0\") pod \"dnsmasq-dns-5f59b8f679-zrlh8\" (UID: \"1f3a53b3-e907-4cff-ad72-d6a217bea837\") " pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.529031 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-ovsdbserver-nb\") pod \"dnsmasq-dns-5f59b8f679-zrlh8\" (UID: \"1f3a53b3-e907-4cff-ad72-d6a217bea837\") " pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.529096 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dfvt\" (UniqueName: \"kubernetes.io/projected/1f3a53b3-e907-4cff-ad72-d6a217bea837-kube-api-access-4dfvt\") pod \"dnsmasq-dns-5f59b8f679-zrlh8\" (UID: \"1f3a53b3-e907-4cff-ad72-d6a217bea837\") " pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.529155 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-ovsdbserver-sb\") pod \"dnsmasq-dns-5f59b8f679-zrlh8\" (UID: \"1f3a53b3-e907-4cff-ad72-d6a217bea837\") " pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.529190 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-dns-svc\") pod 
\"dnsmasq-dns-5f59b8f679-zrlh8\" (UID: \"1f3a53b3-e907-4cff-ad72-d6a217bea837\") " pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.529235 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-config\") pod \"dnsmasq-dns-5f59b8f679-zrlh8\" (UID: \"1f3a53b3-e907-4cff-ad72-d6a217bea837\") " pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.533663 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-zrlh8"] Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.630991 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-ovsdbserver-sb\") pod \"dnsmasq-dns-5f59b8f679-zrlh8\" (UID: \"1f3a53b3-e907-4cff-ad72-d6a217bea837\") " pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.631046 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-dns-svc\") pod \"dnsmasq-dns-5f59b8f679-zrlh8\" (UID: \"1f3a53b3-e907-4cff-ad72-d6a217bea837\") " pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.631096 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-config\") pod \"dnsmasq-dns-5f59b8f679-zrlh8\" (UID: \"1f3a53b3-e907-4cff-ad72-d6a217bea837\") " pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.631151 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-dns-swift-storage-0\") pod \"dnsmasq-dns-5f59b8f679-zrlh8\" (UID: \"1f3a53b3-e907-4cff-ad72-d6a217bea837\") " pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.631191 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-ovsdbserver-nb\") pod \"dnsmasq-dns-5f59b8f679-zrlh8\" (UID: \"1f3a53b3-e907-4cff-ad72-d6a217bea837\") " pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.631246 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dfvt\" (UniqueName: \"kubernetes.io/projected/1f3a53b3-e907-4cff-ad72-d6a217bea837-kube-api-access-4dfvt\") pod \"dnsmasq-dns-5f59b8f679-zrlh8\" (UID: \"1f3a53b3-e907-4cff-ad72-d6a217bea837\") " pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.632092 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-ovsdbserver-sb\") pod \"dnsmasq-dns-5f59b8f679-zrlh8\" (UID: \"1f3a53b3-e907-4cff-ad72-d6a217bea837\") " pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.632127 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-dns-svc\") pod \"dnsmasq-dns-5f59b8f679-zrlh8\" (UID: \"1f3a53b3-e907-4cff-ad72-d6a217bea837\") " pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.632644 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-config\") pod \"dnsmasq-dns-5f59b8f679-zrlh8\" (UID: \"1f3a53b3-e907-4cff-ad72-d6a217bea837\") " pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.632666 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-dns-swift-storage-0\") pod \"dnsmasq-dns-5f59b8f679-zrlh8\" (UID: \"1f3a53b3-e907-4cff-ad72-d6a217bea837\") " pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.632707 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-ovsdbserver-nb\") pod \"dnsmasq-dns-5f59b8f679-zrlh8\" (UID: \"1f3a53b3-e907-4cff-ad72-d6a217bea837\") " pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.652648 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dfvt\" (UniqueName: \"kubernetes.io/projected/1f3a53b3-e907-4cff-ad72-d6a217bea837-kube-api-access-4dfvt\") pod \"dnsmasq-dns-5f59b8f679-zrlh8\" (UID: \"1f3a53b3-e907-4cff-ad72-d6a217bea837\") " pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.758049 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.948729 4899 generic.go:334] "Generic (PLEG): container finished" podID="237108cb-238e-4abc-a099-60c1ce478004" containerID="7e4717f08297f6e535ff8f209cebedd1559de0f9904ec33aa861120513b406e0" exitCode=0 Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.948830 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74dc88fc-qcsh2" Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.949534 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-cff2w" event={"ID":"237108cb-238e-4abc-a099-60c1ce478004","Type":"ContainerDied","Data":"7e4717f08297f6e535ff8f209cebedd1559de0f9904ec33aa861120513b406e0"} Oct 03 08:56:07 crc kubenswrapper[4899]: I1003 08:56:07.969064 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74dc88fc-qcsh2" Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.139459 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/639ed76f-a794-4e2f-b1ab-7096abf1ba58-ovsdbserver-nb\") pod \"639ed76f-a794-4e2f-b1ab-7096abf1ba58\" (UID: \"639ed76f-a794-4e2f-b1ab-7096abf1ba58\") " Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.139534 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fhr89\" (UniqueName: \"kubernetes.io/projected/639ed76f-a794-4e2f-b1ab-7096abf1ba58-kube-api-access-fhr89\") pod \"639ed76f-a794-4e2f-b1ab-7096abf1ba58\" (UID: \"639ed76f-a794-4e2f-b1ab-7096abf1ba58\") " Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.139594 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/639ed76f-a794-4e2f-b1ab-7096abf1ba58-ovsdbserver-sb\") pod \"639ed76f-a794-4e2f-b1ab-7096abf1ba58\" (UID: \"639ed76f-a794-4e2f-b1ab-7096abf1ba58\") " Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.139634 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/639ed76f-a794-4e2f-b1ab-7096abf1ba58-config\") pod \"639ed76f-a794-4e2f-b1ab-7096abf1ba58\" (UID: \"639ed76f-a794-4e2f-b1ab-7096abf1ba58\") " Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.139665 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/639ed76f-a794-4e2f-b1ab-7096abf1ba58-dns-svc\") pod \"639ed76f-a794-4e2f-b1ab-7096abf1ba58\" (UID: \"639ed76f-a794-4e2f-b1ab-7096abf1ba58\") " Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.140006 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/639ed76f-a794-4e2f-b1ab-7096abf1ba58-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "639ed76f-a794-4e2f-b1ab-7096abf1ba58" (UID: "639ed76f-a794-4e2f-b1ab-7096abf1ba58"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.140281 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/639ed76f-a794-4e2f-b1ab-7096abf1ba58-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "639ed76f-a794-4e2f-b1ab-7096abf1ba58" (UID: "639ed76f-a794-4e2f-b1ab-7096abf1ba58"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.140443 4899 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/639ed76f-a794-4e2f-b1ab-7096abf1ba58-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.140465 4899 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/639ed76f-a794-4e2f-b1ab-7096abf1ba58-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.141281 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/639ed76f-a794-4e2f-b1ab-7096abf1ba58-config" (OuterVolumeSpecName: "config") pod "639ed76f-a794-4e2f-b1ab-7096abf1ba58" (UID: "639ed76f-a794-4e2f-b1ab-7096abf1ba58"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.141616 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/639ed76f-a794-4e2f-b1ab-7096abf1ba58-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "639ed76f-a794-4e2f-b1ab-7096abf1ba58" (UID: "639ed76f-a794-4e2f-b1ab-7096abf1ba58"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.145914 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/639ed76f-a794-4e2f-b1ab-7096abf1ba58-kube-api-access-fhr89" (OuterVolumeSpecName: "kube-api-access-fhr89") pod "639ed76f-a794-4e2f-b1ab-7096abf1ba58" (UID: "639ed76f-a794-4e2f-b1ab-7096abf1ba58"). InnerVolumeSpecName "kube-api-access-fhr89". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.216258 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-zrlh8"] Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.241916 4899 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/639ed76f-a794-4e2f-b1ab-7096abf1ba58-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.241937 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fhr89\" (UniqueName: \"kubernetes.io/projected/639ed76f-a794-4e2f-b1ab-7096abf1ba58-kube-api-access-fhr89\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.241947 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/639ed76f-a794-4e2f-b1ab-7096abf1ba58-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.280968 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-10c1-account-create-jgc9k" Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.344761 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ckj75\" (UniqueName: \"kubernetes.io/projected/a7891f02-e1ab-4c31-a210-971c66974961-kube-api-access-ckj75\") pod \"a7891f02-e1ab-4c31-a210-971c66974961\" (UID: \"a7891f02-e1ab-4c31-a210-971c66974961\") " Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.350031 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7891f02-e1ab-4c31-a210-971c66974961-kube-api-access-ckj75" (OuterVolumeSpecName: "kube-api-access-ckj75") pod "a7891f02-e1ab-4c31-a210-971c66974961" (UID: "a7891f02-e1ab-4c31-a210-971c66974961"). InnerVolumeSpecName "kube-api-access-ckj75". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.378618 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-1c09-account-create-qnsmw" Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.392794 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-581b-account-create-tbmqr" Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.446345 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fsz4\" (UniqueName: \"kubernetes.io/projected/d9905523-1414-4f5e-958d-22ffc752c061-kube-api-access-2fsz4\") pod \"d9905523-1414-4f5e-958d-22ffc752c061\" (UID: \"d9905523-1414-4f5e-958d-22ffc752c061\") " Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.446409 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lflz\" (UniqueName: \"kubernetes.io/projected/9a47a104-3fdb-4f24-8fd4-ec9b8b6dca8a-kube-api-access-6lflz\") pod \"9a47a104-3fdb-4f24-8fd4-ec9b8b6dca8a\" (UID: \"9a47a104-3fdb-4f24-8fd4-ec9b8b6dca8a\") " Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.446796 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ckj75\" (UniqueName: \"kubernetes.io/projected/a7891f02-e1ab-4c31-a210-971c66974961-kube-api-access-ckj75\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.450296 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9905523-1414-4f5e-958d-22ffc752c061-kube-api-access-2fsz4" (OuterVolumeSpecName: "kube-api-access-2fsz4") pod "d9905523-1414-4f5e-958d-22ffc752c061" (UID: "d9905523-1414-4f5e-958d-22ffc752c061"). InnerVolumeSpecName "kube-api-access-2fsz4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.450345 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a47a104-3fdb-4f24-8fd4-ec9b8b6dca8a-kube-api-access-6lflz" (OuterVolumeSpecName: "kube-api-access-6lflz") pod "9a47a104-3fdb-4f24-8fd4-ec9b8b6dca8a" (UID: "9a47a104-3fdb-4f24-8fd4-ec9b8b6dca8a"). InnerVolumeSpecName "kube-api-access-6lflz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.547670 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fsz4\" (UniqueName: \"kubernetes.io/projected/d9905523-1414-4f5e-958d-22ffc752c061-kube-api-access-2fsz4\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.548266 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lflz\" (UniqueName: \"kubernetes.io/projected/9a47a104-3fdb-4f24-8fd4-ec9b8b6dca8a-kube-api-access-6lflz\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.986507 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-10c1-account-create-jgc9k" event={"ID":"a7891f02-e1ab-4c31-a210-971c66974961","Type":"ContainerDied","Data":"d7e6f8ddcff503295cf3ea7ad14d0980f766769568852786e625363581a22a71"} Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.986547 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d7e6f8ddcff503295cf3ea7ad14d0980f766769568852786e625363581a22a71" Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.986648 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-10c1-account-create-jgc9k" Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.997684 4899 generic.go:334] "Generic (PLEG): container finished" podID="1f3a53b3-e907-4cff-ad72-d6a217bea837" containerID="7b81b25b07738fd729abcee40dca5d2fc3b349d3876dd3ebfe5f5eea9072cf7d" exitCode=0 Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.997732 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" event={"ID":"1f3a53b3-e907-4cff-ad72-d6a217bea837","Type":"ContainerDied","Data":"7b81b25b07738fd729abcee40dca5d2fc3b349d3876dd3ebfe5f5eea9072cf7d"} Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.997818 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" event={"ID":"1f3a53b3-e907-4cff-ad72-d6a217bea837","Type":"ContainerStarted","Data":"70edf8f1cd4369113f5bbdd43beea53d399e1f1fbfae3381ee1ad5a6f2d9d34e"} Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.999340 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-581b-account-create-tbmqr" event={"ID":"9a47a104-3fdb-4f24-8fd4-ec9b8b6dca8a","Type":"ContainerDied","Data":"2c2b9e2577f925928ff121dfeae4e39b75626314df52e48723c31ff30e99dcda"} Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.999381 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2c2b9e2577f925928ff121dfeae4e39b75626314df52e48723c31ff30e99dcda" Oct 03 08:56:08 crc kubenswrapper[4899]: I1003 08:56:08.999358 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-581b-account-create-tbmqr" Oct 03 08:56:09 crc kubenswrapper[4899]: I1003 08:56:09.002602 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-1c09-account-create-qnsmw" event={"ID":"d9905523-1414-4f5e-958d-22ffc752c061","Type":"ContainerDied","Data":"01337fa7ca8c30a619a7498eff3ed39a8bceeb30f64d245b64926d8cf68b299e"} Oct 03 08:56:09 crc kubenswrapper[4899]: I1003 08:56:09.002634 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-1c09-account-create-qnsmw" Oct 03 08:56:09 crc kubenswrapper[4899]: I1003 08:56:09.002639 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="01337fa7ca8c30a619a7498eff3ed39a8bceeb30f64d245b64926d8cf68b299e" Oct 03 08:56:09 crc kubenswrapper[4899]: I1003 08:56:09.002676 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74dc88fc-qcsh2" Oct 03 08:56:09 crc kubenswrapper[4899]: I1003 08:56:09.191217 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74dc88fc-qcsh2"] Oct 03 08:56:09 crc kubenswrapper[4899]: I1003 08:56:09.193482 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-74dc88fc-qcsh2"] Oct 03 08:56:09 crc kubenswrapper[4899]: I1003 08:56:09.340041 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-cff2w" Oct 03 08:56:09 crc kubenswrapper[4899]: I1003 08:56:09.373511 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237108cb-238e-4abc-a099-60c1ce478004-combined-ca-bundle\") pod \"237108cb-238e-4abc-a099-60c1ce478004\" (UID: \"237108cb-238e-4abc-a099-60c1ce478004\") " Oct 03 08:56:09 crc kubenswrapper[4899]: I1003 08:56:09.373558 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/237108cb-238e-4abc-a099-60c1ce478004-config-data\") pod \"237108cb-238e-4abc-a099-60c1ce478004\" (UID: \"237108cb-238e-4abc-a099-60c1ce478004\") " Oct 03 08:56:09 crc kubenswrapper[4899]: I1003 08:56:09.373636 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-52fmt\" (UniqueName: \"kubernetes.io/projected/237108cb-238e-4abc-a099-60c1ce478004-kube-api-access-52fmt\") pod \"237108cb-238e-4abc-a099-60c1ce478004\" (UID: \"237108cb-238e-4abc-a099-60c1ce478004\") " Oct 03 08:56:09 crc kubenswrapper[4899]: I1003 08:56:09.379016 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/237108cb-238e-4abc-a099-60c1ce478004-kube-api-access-52fmt" (OuterVolumeSpecName: "kube-api-access-52fmt") pod "237108cb-238e-4abc-a099-60c1ce478004" (UID: "237108cb-238e-4abc-a099-60c1ce478004"). InnerVolumeSpecName "kube-api-access-52fmt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:56:09 crc kubenswrapper[4899]: I1003 08:56:09.395856 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/237108cb-238e-4abc-a099-60c1ce478004-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "237108cb-238e-4abc-a099-60c1ce478004" (UID: "237108cb-238e-4abc-a099-60c1ce478004"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:09 crc kubenswrapper[4899]: I1003 08:56:09.416228 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/237108cb-238e-4abc-a099-60c1ce478004-config-data" (OuterVolumeSpecName: "config-data") pod "237108cb-238e-4abc-a099-60c1ce478004" (UID: "237108cb-238e-4abc-a099-60c1ce478004"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:09 crc kubenswrapper[4899]: I1003 08:56:09.475174 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-52fmt\" (UniqueName: \"kubernetes.io/projected/237108cb-238e-4abc-a099-60c1ce478004-kube-api-access-52fmt\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:09 crc kubenswrapper[4899]: I1003 08:56:09.475213 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/237108cb-238e-4abc-a099-60c1ce478004-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:09 crc kubenswrapper[4899]: I1003 08:56:09.475227 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/237108cb-238e-4abc-a099-60c1ce478004-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.011799 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-cff2w" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.012017 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-cff2w" event={"ID":"237108cb-238e-4abc-a099-60c1ce478004","Type":"ContainerDied","Data":"676bdab4dc49e3287fafe108204f1c99a6520c10e4067df0a2e13ece53f427c9"} Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.012236 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="676bdab4dc49e3287fafe108204f1c99a6520c10e4067df0a2e13ece53f427c9" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.014525 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" event={"ID":"1f3a53b3-e907-4cff-ad72-d6a217bea837","Type":"ContainerStarted","Data":"740c83954c0bb7005af958c1be9b3f097c78c2d38572ce378efa8543f8bc92c2"} Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.014814 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.036015 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" podStartSLOduration=3.035991661 podStartE2EDuration="3.035991661s" podCreationTimestamp="2025-10-03 08:56:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:56:10.028588097 +0000 UTC m=+944.136073050" watchObservedRunningTime="2025-10-03 08:56:10.035991661 +0000 UTC m=+944.143476614" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.191955 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-zrlh8"] Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.208769 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-9k4zg"] Oct 03 08:56:10 crc kubenswrapper[4899]: E1003 08:56:10.209358 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7891f02-e1ab-4c31-a210-971c66974961" containerName="mariadb-account-create" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.209378 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7891f02-e1ab-4c31-a210-971c66974961" containerName="mariadb-account-create" Oct 03 08:56:10 crc kubenswrapper[4899]: E1003 08:56:10.209403 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9905523-1414-4f5e-958d-22ffc752c061" 
containerName="mariadb-account-create" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.209410 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9905523-1414-4f5e-958d-22ffc752c061" containerName="mariadb-account-create" Oct 03 08:56:10 crc kubenswrapper[4899]: E1003 08:56:10.209428 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="237108cb-238e-4abc-a099-60c1ce478004" containerName="keystone-db-sync" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.209435 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="237108cb-238e-4abc-a099-60c1ce478004" containerName="keystone-db-sync" Oct 03 08:56:10 crc kubenswrapper[4899]: E1003 08:56:10.209458 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a47a104-3fdb-4f24-8fd4-ec9b8b6dca8a" containerName="mariadb-account-create" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.209465 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a47a104-3fdb-4f24-8fd4-ec9b8b6dca8a" containerName="mariadb-account-create" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.209664 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9905523-1414-4f5e-958d-22ffc752c061" containerName="mariadb-account-create" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.209689 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a47a104-3fdb-4f24-8fd4-ec9b8b6dca8a" containerName="mariadb-account-create" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.209714 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7891f02-e1ab-4c31-a210-971c66974961" containerName="mariadb-account-create" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.209727 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="237108cb-238e-4abc-a099-60c1ce478004" containerName="keystone-db-sync" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.211190 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bbf5cc879-9k4zg" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.221220 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-9k4zg"] Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.231506 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-fp7gs"] Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.232633 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-fp7gs" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.247098 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.247310 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.247482 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.247674 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-6hp7k" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.271274 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-fp7gs"] Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.352530 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-58b885464f-hm46m"] Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.354353 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-58b885464f-hm46m" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.362407 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.362644 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-zrbfl" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.369601 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.369787 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.384758 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-58b885464f-hm46m"] Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.393463 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-horizon-secret-key\") pod \"horizon-58b885464f-hm46m\" (UID: \"c45fd4cf-daa8-4226-bb75-c55604b5ccb6\") " pod="openstack/horizon-58b885464f-hm46m" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.393502 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-config-data\") pod \"keystone-bootstrap-fp7gs\" (UID: \"00702fc3-6e90-4171-85c6-03f71bb45256\") " pod="openstack/keystone-bootstrap-fp7gs" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.393522 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2chgw\" (UniqueName: \"kubernetes.io/projected/00702fc3-6e90-4171-85c6-03f71bb45256-kube-api-access-2chgw\") pod \"keystone-bootstrap-fp7gs\" (UID: \"00702fc3-6e90-4171-85c6-03f71bb45256\") " pod="openstack/keystone-bootstrap-fp7gs" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.393542 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-config-data\") pod \"horizon-58b885464f-hm46m\" 
(UID: \"c45fd4cf-daa8-4226-bb75-c55604b5ccb6\") " pod="openstack/horizon-58b885464f-hm46m" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.393559 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzld8\" (UniqueName: \"kubernetes.io/projected/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-kube-api-access-hzld8\") pod \"horizon-58b885464f-hm46m\" (UID: \"c45fd4cf-daa8-4226-bb75-c55604b5ccb6\") " pod="openstack/horizon-58b885464f-hm46m" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.393593 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-scripts\") pod \"horizon-58b885464f-hm46m\" (UID: \"c45fd4cf-daa8-4226-bb75-c55604b5ccb6\") " pod="openstack/horizon-58b885464f-hm46m" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.393609 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-fernet-keys\") pod \"keystone-bootstrap-fp7gs\" (UID: \"00702fc3-6e90-4171-85c6-03f71bb45256\") " pod="openstack/keystone-bootstrap-fp7gs" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.393629 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-dns-svc\") pod \"dnsmasq-dns-bbf5cc879-9k4zg\" (UID: \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\") " pod="openstack/dnsmasq-dns-bbf5cc879-9k4zg" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.393644 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-combined-ca-bundle\") pod \"keystone-bootstrap-fp7gs\" (UID: \"00702fc3-6e90-4171-85c6-03f71bb45256\") " pod="openstack/keystone-bootstrap-fp7gs" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.393661 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-scripts\") pod \"keystone-bootstrap-fp7gs\" (UID: \"00702fc3-6e90-4171-85c6-03f71bb45256\") " pod="openstack/keystone-bootstrap-fp7gs" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.393683 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-dns-swift-storage-0\") pod \"dnsmasq-dns-bbf5cc879-9k4zg\" (UID: \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\") " pod="openstack/dnsmasq-dns-bbf5cc879-9k4zg" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.393721 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-logs\") pod \"horizon-58b885464f-hm46m\" (UID: \"c45fd4cf-daa8-4226-bb75-c55604b5ccb6\") " pod="openstack/horizon-58b885464f-hm46m" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.393739 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-config\") pod \"dnsmasq-dns-bbf5cc879-9k4zg\" 
(UID: \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\") " pod="openstack/dnsmasq-dns-bbf5cc879-9k4zg" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.393764 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-credential-keys\") pod \"keystone-bootstrap-fp7gs\" (UID: \"00702fc3-6e90-4171-85c6-03f71bb45256\") " pod="openstack/keystone-bootstrap-fp7gs" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.393782 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-ovsdbserver-sb\") pod \"dnsmasq-dns-bbf5cc879-9k4zg\" (UID: \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\") " pod="openstack/dnsmasq-dns-bbf5cc879-9k4zg" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.393803 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-ovsdbserver-nb\") pod \"dnsmasq-dns-bbf5cc879-9k4zg\" (UID: \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\") " pod="openstack/dnsmasq-dns-bbf5cc879-9k4zg" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.393825 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpzct\" (UniqueName: \"kubernetes.io/projected/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-kube-api-access-mpzct\") pod \"dnsmasq-dns-bbf5cc879-9k4zg\" (UID: \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\") " pod="openstack/dnsmasq-dns-bbf5cc879-9k4zg" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.477559 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.479773 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.483232 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.485687 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.494728 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-config-data\") pod \"horizon-58b885464f-hm46m\" (UID: \"c45fd4cf-daa8-4226-bb75-c55604b5ccb6\") " pod="openstack/horizon-58b885464f-hm46m" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.494762 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzld8\" (UniqueName: \"kubernetes.io/projected/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-kube-api-access-hzld8\") pod \"horizon-58b885464f-hm46m\" (UID: \"c45fd4cf-daa8-4226-bb75-c55604b5ccb6\") " pod="openstack/horizon-58b885464f-hm46m" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.494803 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcn9b\" (UniqueName: \"kubernetes.io/projected/8dc2bbb6-802d-4713-935a-353136d48619-kube-api-access-jcn9b\") pod \"ceilometer-0\" (UID: \"8dc2bbb6-802d-4713-935a-353136d48619\") " pod="openstack/ceilometer-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.494820 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-scripts\") pod \"horizon-58b885464f-hm46m\" (UID: \"c45fd4cf-daa8-4226-bb75-c55604b5ccb6\") " pod="openstack/horizon-58b885464f-hm46m" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.494838 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8dc2bbb6-802d-4713-935a-353136d48619-run-httpd\") pod \"ceilometer-0\" (UID: \"8dc2bbb6-802d-4713-935a-353136d48619\") " pod="openstack/ceilometer-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.494856 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-fernet-keys\") pod \"keystone-bootstrap-fp7gs\" (UID: \"00702fc3-6e90-4171-85c6-03f71bb45256\") " pod="openstack/keystone-bootstrap-fp7gs" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.494876 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-dns-svc\") pod \"dnsmasq-dns-bbf5cc879-9k4zg\" (UID: \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\") " pod="openstack/dnsmasq-dns-bbf5cc879-9k4zg" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.494902 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-combined-ca-bundle\") pod \"keystone-bootstrap-fp7gs\" (UID: \"00702fc3-6e90-4171-85c6-03f71bb45256\") " pod="openstack/keystone-bootstrap-fp7gs" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.494920 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-scripts\") pod \"keystone-bootstrap-fp7gs\" (UID: \"00702fc3-6e90-4171-85c6-03f71bb45256\") " pod="openstack/keystone-bootstrap-fp7gs" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.494940 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-dns-swift-storage-0\") pod \"dnsmasq-dns-bbf5cc879-9k4zg\" (UID: \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\") " pod="openstack/dnsmasq-dns-bbf5cc879-9k4zg" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.494969 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dc2bbb6-802d-4713-935a-353136d48619-config-data\") pod \"ceilometer-0\" (UID: \"8dc2bbb6-802d-4713-935a-353136d48619\") " pod="openstack/ceilometer-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.494989 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dc2bbb6-802d-4713-935a-353136d48619-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8dc2bbb6-802d-4713-935a-353136d48619\") " pod="openstack/ceilometer-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.495014 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8dc2bbb6-802d-4713-935a-353136d48619-scripts\") pod \"ceilometer-0\" (UID: \"8dc2bbb6-802d-4713-935a-353136d48619\") " pod="openstack/ceilometer-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.495039 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-logs\") pod \"horizon-58b885464f-hm46m\" (UID: \"c45fd4cf-daa8-4226-bb75-c55604b5ccb6\") " pod="openstack/horizon-58b885464f-hm46m" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.495058 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-config\") pod \"dnsmasq-dns-bbf5cc879-9k4zg\" (UID: \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\") " pod="openstack/dnsmasq-dns-bbf5cc879-9k4zg" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.495077 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8dc2bbb6-802d-4713-935a-353136d48619-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8dc2bbb6-802d-4713-935a-353136d48619\") " pod="openstack/ceilometer-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.495103 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-credential-keys\") pod \"keystone-bootstrap-fp7gs\" (UID: \"00702fc3-6e90-4171-85c6-03f71bb45256\") " pod="openstack/keystone-bootstrap-fp7gs" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.495120 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-ovsdbserver-sb\") pod \"dnsmasq-dns-bbf5cc879-9k4zg\" (UID: 
\"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\") " pod="openstack/dnsmasq-dns-bbf5cc879-9k4zg" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.495137 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8dc2bbb6-802d-4713-935a-353136d48619-log-httpd\") pod \"ceilometer-0\" (UID: \"8dc2bbb6-802d-4713-935a-353136d48619\") " pod="openstack/ceilometer-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.495153 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-ovsdbserver-nb\") pod \"dnsmasq-dns-bbf5cc879-9k4zg\" (UID: \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\") " pod="openstack/dnsmasq-dns-bbf5cc879-9k4zg" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.495175 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpzct\" (UniqueName: \"kubernetes.io/projected/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-kube-api-access-mpzct\") pod \"dnsmasq-dns-bbf5cc879-9k4zg\" (UID: \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\") " pod="openstack/dnsmasq-dns-bbf5cc879-9k4zg" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.495195 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-config-data\") pod \"keystone-bootstrap-fp7gs\" (UID: \"00702fc3-6e90-4171-85c6-03f71bb45256\") " pod="openstack/keystone-bootstrap-fp7gs" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.495209 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-horizon-secret-key\") pod \"horizon-58b885464f-hm46m\" (UID: \"c45fd4cf-daa8-4226-bb75-c55604b5ccb6\") " pod="openstack/horizon-58b885464f-hm46m" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.495228 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2chgw\" (UniqueName: \"kubernetes.io/projected/00702fc3-6e90-4171-85c6-03f71bb45256-kube-api-access-2chgw\") pod \"keystone-bootstrap-fp7gs\" (UID: \"00702fc3-6e90-4171-85c6-03f71bb45256\") " pod="openstack/keystone-bootstrap-fp7gs" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.496045 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-scripts\") pod \"horizon-58b885464f-hm46m\" (UID: \"c45fd4cf-daa8-4226-bb75-c55604b5ccb6\") " pod="openstack/horizon-58b885464f-hm46m" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.496932 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-config\") pod \"dnsmasq-dns-bbf5cc879-9k4zg\" (UID: \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\") " pod="openstack/dnsmasq-dns-bbf5cc879-9k4zg" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.497160 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-ovsdbserver-nb\") pod \"dnsmasq-dns-bbf5cc879-9k4zg\" (UID: \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\") " pod="openstack/dnsmasq-dns-bbf5cc879-9k4zg" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 
08:56:10.497628 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-ovsdbserver-sb\") pod \"dnsmasq-dns-bbf5cc879-9k4zg\" (UID: \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\") " pod="openstack/dnsmasq-dns-bbf5cc879-9k4zg" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.498706 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-dns-swift-storage-0\") pod \"dnsmasq-dns-bbf5cc879-9k4zg\" (UID: \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\") " pod="openstack/dnsmasq-dns-bbf5cc879-9k4zg" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.498946 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-dns-svc\") pod \"dnsmasq-dns-bbf5cc879-9k4zg\" (UID: \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\") " pod="openstack/dnsmasq-dns-bbf5cc879-9k4zg" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.499314 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-config-data\") pod \"horizon-58b885464f-hm46m\" (UID: \"c45fd4cf-daa8-4226-bb75-c55604b5ccb6\") " pod="openstack/horizon-58b885464f-hm46m" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.499387 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-logs\") pod \"horizon-58b885464f-hm46m\" (UID: \"c45fd4cf-daa8-4226-bb75-c55604b5ccb6\") " pod="openstack/horizon-58b885464f-hm46m" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.506729 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-combined-ca-bundle\") pod \"keystone-bootstrap-fp7gs\" (UID: \"00702fc3-6e90-4171-85c6-03f71bb45256\") " pod="openstack/keystone-bootstrap-fp7gs" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.506802 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-q9f5f"] Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.510179 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-q9f5f" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.511304 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-credential-keys\") pod \"keystone-bootstrap-fp7gs\" (UID: \"00702fc3-6e90-4171-85c6-03f71bb45256\") " pod="openstack/keystone-bootstrap-fp7gs" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.511947 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-69lhb"] Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.512861 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-69lhb" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.516720 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-config-data\") pod \"keystone-bootstrap-fp7gs\" (UID: \"00702fc3-6e90-4171-85c6-03f71bb45256\") " pod="openstack/keystone-bootstrap-fp7gs" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.517469 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-fernet-keys\") pod \"keystone-bootstrap-fp7gs\" (UID: \"00702fc3-6e90-4171-85c6-03f71bb45256\") " pod="openstack/keystone-bootstrap-fp7gs" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.517664 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-r7w6n" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.517820 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.517983 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.518165 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-btcbf" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.518279 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.518381 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.531748 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-scripts\") pod \"keystone-bootstrap-fp7gs\" (UID: \"00702fc3-6e90-4171-85c6-03f71bb45256\") " pod="openstack/keystone-bootstrap-fp7gs" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.533141 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-horizon-secret-key\") pod \"horizon-58b885464f-hm46m\" (UID: \"c45fd4cf-daa8-4226-bb75-c55604b5ccb6\") " pod="openstack/horizon-58b885464f-hm46m" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.540916 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzld8\" (UniqueName: \"kubernetes.io/projected/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-kube-api-access-hzld8\") pod \"horizon-58b885464f-hm46m\" (UID: \"c45fd4cf-daa8-4226-bb75-c55604b5ccb6\") " pod="openstack/horizon-58b885464f-hm46m" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.546770 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="639ed76f-a794-4e2f-b1ab-7096abf1ba58" path="/var/lib/kubelet/pods/639ed76f-a794-4e2f-b1ab-7096abf1ba58/volumes" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.547149 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.551359 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-q9f5f"] Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.560334 4899 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2chgw\" (UniqueName: \"kubernetes.io/projected/00702fc3-6e90-4171-85c6-03f71bb45256-kube-api-access-2chgw\") pod \"keystone-bootstrap-fp7gs\" (UID: \"00702fc3-6e90-4171-85c6-03f71bb45256\") " pod="openstack/keystone-bootstrap-fp7gs" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.560920 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpzct\" (UniqueName: \"kubernetes.io/projected/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-kube-api-access-mpzct\") pod \"dnsmasq-dns-bbf5cc879-9k4zg\" (UID: \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\") " pod="openstack/dnsmasq-dns-bbf5cc879-9k4zg" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.562930 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-69lhb"] Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.574676 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-fp7gs" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.599040 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-9k4zg"] Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.599770 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bbf5cc879-9k4zg" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.601620 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5545cd7a-7849-48e5-91f3-6a3a8d51e665-config-data\") pod \"cinder-db-sync-q9f5f\" (UID: \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\") " pod="openstack/cinder-db-sync-q9f5f" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.601668 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcn9b\" (UniqueName: \"kubernetes.io/projected/8dc2bbb6-802d-4713-935a-353136d48619-kube-api-access-jcn9b\") pod \"ceilometer-0\" (UID: \"8dc2bbb6-802d-4713-935a-353136d48619\") " pod="openstack/ceilometer-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.601687 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8dc2bbb6-802d-4713-935a-353136d48619-run-httpd\") pod \"ceilometer-0\" (UID: \"8dc2bbb6-802d-4713-935a-353136d48619\") " pod="openstack/ceilometer-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.601709 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5545cd7a-7849-48e5-91f3-6a3a8d51e665-etc-machine-id\") pod \"cinder-db-sync-q9f5f\" (UID: \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\") " pod="openstack/cinder-db-sync-q9f5f" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.601742 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5545cd7a-7849-48e5-91f3-6a3a8d51e665-scripts\") pod \"cinder-db-sync-q9f5f\" (UID: \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\") " pod="openstack/cinder-db-sync-q9f5f" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.601761 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dc2bbb6-802d-4713-935a-353136d48619-config-data\") pod \"ceilometer-0\" (UID: 
\"8dc2bbb6-802d-4713-935a-353136d48619\") " pod="openstack/ceilometer-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.601778 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5545cd7a-7849-48e5-91f3-6a3a8d51e665-db-sync-config-data\") pod \"cinder-db-sync-q9f5f\" (UID: \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\") " pod="openstack/cinder-db-sync-q9f5f" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.601798 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dc2bbb6-802d-4713-935a-353136d48619-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8dc2bbb6-802d-4713-935a-353136d48619\") " pod="openstack/ceilometer-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.601819 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5545cd7a-7849-48e5-91f3-6a3a8d51e665-combined-ca-bundle\") pod \"cinder-db-sync-q9f5f\" (UID: \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\") " pod="openstack/cinder-db-sync-q9f5f" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.601836 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8dc2bbb6-802d-4713-935a-353136d48619-scripts\") pod \"ceilometer-0\" (UID: \"8dc2bbb6-802d-4713-935a-353136d48619\") " pod="openstack/ceilometer-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.601854 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cf5vp\" (UniqueName: \"kubernetes.io/projected/5545cd7a-7849-48e5-91f3-6a3a8d51e665-kube-api-access-cf5vp\") pod \"cinder-db-sync-q9f5f\" (UID: \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\") " pod="openstack/cinder-db-sync-q9f5f" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.601875 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8dc2bbb6-802d-4713-935a-353136d48619-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8dc2bbb6-802d-4713-935a-353136d48619\") " pod="openstack/ceilometer-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.601923 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8dc2bbb6-802d-4713-935a-353136d48619-log-httpd\") pod \"ceilometer-0\" (UID: \"8dc2bbb6-802d-4713-935a-353136d48619\") " pod="openstack/ceilometer-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.602274 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8dc2bbb6-802d-4713-935a-353136d48619-log-httpd\") pod \"ceilometer-0\" (UID: \"8dc2bbb6-802d-4713-935a-353136d48619\") " pod="openstack/ceilometer-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.602719 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8dc2bbb6-802d-4713-935a-353136d48619-run-httpd\") pod \"ceilometer-0\" (UID: \"8dc2bbb6-802d-4713-935a-353136d48619\") " pod="openstack/ceilometer-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.618262 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/8dc2bbb6-802d-4713-935a-353136d48619-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8dc2bbb6-802d-4713-935a-353136d48619\") " pod="openstack/ceilometer-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.627194 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dc2bbb6-802d-4713-935a-353136d48619-config-data\") pod \"ceilometer-0\" (UID: \"8dc2bbb6-802d-4713-935a-353136d48619\") " pod="openstack/ceilometer-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.634430 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dc2bbb6-802d-4713-935a-353136d48619-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8dc2bbb6-802d-4713-935a-353136d48619\") " pod="openstack/ceilometer-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.640655 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8dc2bbb6-802d-4713-935a-353136d48619-scripts\") pod \"ceilometer-0\" (UID: \"8dc2bbb6-802d-4713-935a-353136d48619\") " pod="openstack/ceilometer-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.644257 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcn9b\" (UniqueName: \"kubernetes.io/projected/8dc2bbb6-802d-4713-935a-353136d48619-kube-api-access-jcn9b\") pod \"ceilometer-0\" (UID: \"8dc2bbb6-802d-4713-935a-353136d48619\") " pod="openstack/ceilometer-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.654474 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-rp67d"] Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.662121 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.684697 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-58b885464f-hm46m" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.698095 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-rp67d"] Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.704224 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4663e6c0-d0e2-49f9-a457-8bc02fefa635-scripts\") pod \"placement-db-sync-69lhb\" (UID: \"4663e6c0-d0e2-49f9-a457-8bc02fefa635\") " pod="openstack/placement-db-sync-69lhb" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.704359 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5545cd7a-7849-48e5-91f3-6a3a8d51e665-config-data\") pod \"cinder-db-sync-q9f5f\" (UID: \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\") " pod="openstack/cinder-db-sync-q9f5f" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.704397 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5545cd7a-7849-48e5-91f3-6a3a8d51e665-etc-machine-id\") pod \"cinder-db-sync-q9f5f\" (UID: \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\") " pod="openstack/cinder-db-sync-q9f5f" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.704424 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4663e6c0-d0e2-49f9-a457-8bc02fefa635-combined-ca-bundle\") pod \"placement-db-sync-69lhb\" (UID: \"4663e6c0-d0e2-49f9-a457-8bc02fefa635\") " pod="openstack/placement-db-sync-69lhb" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.704456 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5545cd7a-7849-48e5-91f3-6a3a8d51e665-scripts\") pod \"cinder-db-sync-q9f5f\" (UID: \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\") " pod="openstack/cinder-db-sync-q9f5f" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.704475 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4663e6c0-d0e2-49f9-a457-8bc02fefa635-logs\") pod \"placement-db-sync-69lhb\" (UID: \"4663e6c0-d0e2-49f9-a457-8bc02fefa635\") " pod="openstack/placement-db-sync-69lhb" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.704493 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5545cd7a-7849-48e5-91f3-6a3a8d51e665-db-sync-config-data\") pod \"cinder-db-sync-q9f5f\" (UID: \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\") " pod="openstack/cinder-db-sync-q9f5f" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.704533 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8x4h9\" (UniqueName: \"kubernetes.io/projected/4663e6c0-d0e2-49f9-a457-8bc02fefa635-kube-api-access-8x4h9\") pod \"placement-db-sync-69lhb\" (UID: \"4663e6c0-d0e2-49f9-a457-8bc02fefa635\") " pod="openstack/placement-db-sync-69lhb" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.704551 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5545cd7a-7849-48e5-91f3-6a3a8d51e665-combined-ca-bundle\") pod 
\"cinder-db-sync-q9f5f\" (UID: \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\") " pod="openstack/cinder-db-sync-q9f5f" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.704572 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cf5vp\" (UniqueName: \"kubernetes.io/projected/5545cd7a-7849-48e5-91f3-6a3a8d51e665-kube-api-access-cf5vp\") pod \"cinder-db-sync-q9f5f\" (UID: \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\") " pod="openstack/cinder-db-sync-q9f5f" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.704648 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4663e6c0-d0e2-49f9-a457-8bc02fefa635-config-data\") pod \"placement-db-sync-69lhb\" (UID: \"4663e6c0-d0e2-49f9-a457-8bc02fefa635\") " pod="openstack/placement-db-sync-69lhb" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.706967 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5545cd7a-7849-48e5-91f3-6a3a8d51e665-etc-machine-id\") pod \"cinder-db-sync-q9f5f\" (UID: \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\") " pod="openstack/cinder-db-sync-q9f5f" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.710704 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5545cd7a-7849-48e5-91f3-6a3a8d51e665-config-data\") pod \"cinder-db-sync-q9f5f\" (UID: \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\") " pod="openstack/cinder-db-sync-q9f5f" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.711255 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5545cd7a-7849-48e5-91f3-6a3a8d51e665-db-sync-config-data\") pod \"cinder-db-sync-q9f5f\" (UID: \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\") " pod="openstack/cinder-db-sync-q9f5f" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.724401 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.724741 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5545cd7a-7849-48e5-91f3-6a3a8d51e665-scripts\") pod \"cinder-db-sync-q9f5f\" (UID: \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\") " pod="openstack/cinder-db-sync-q9f5f" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.726035 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.731070 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5545cd7a-7849-48e5-91f3-6a3a8d51e665-combined-ca-bundle\") pod \"cinder-db-sync-q9f5f\" (UID: \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\") " pod="openstack/cinder-db-sync-q9f5f" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.746221 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-9s7jc" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.749082 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.761979 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.762181 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.762296 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.764527 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cf5vp\" (UniqueName: \"kubernetes.io/projected/5545cd7a-7849-48e5-91f3-6a3a8d51e665-kube-api-access-cf5vp\") pod \"cinder-db-sync-q9f5f\" (UID: \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\") " pod="openstack/cinder-db-sync-q9f5f" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.788150 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-78dbfb6845-276zt"] Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.789614 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-78dbfb6845-276zt" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.797421 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-78dbfb6845-276zt"] Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.807638 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-config\") pod \"dnsmasq-dns-56df8fb6b7-rp67d\" (UID: \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.807709 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4663e6c0-d0e2-49f9-a457-8bc02fefa635-config-data\") pod \"placement-db-sync-69lhb\" (UID: \"4663e6c0-d0e2-49f9-a457-8bc02fefa635\") " pod="openstack/placement-db-sync-69lhb" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.807736 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-rp67d\" (UID: \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.807775 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4663e6c0-d0e2-49f9-a457-8bc02fefa635-scripts\") pod \"placement-db-sync-69lhb\" (UID: \"4663e6c0-d0e2-49f9-a457-8bc02fefa635\") " pod="openstack/placement-db-sync-69lhb" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.807810 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5zt7\" (UniqueName: \"kubernetes.io/projected/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-kube-api-access-g5zt7\") pod \"dnsmasq-dns-56df8fb6b7-rp67d\" (UID: \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.807838 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4663e6c0-d0e2-49f9-a457-8bc02fefa635-combined-ca-bundle\") pod \"placement-db-sync-69lhb\" (UID: \"4663e6c0-d0e2-49f9-a457-8bc02fefa635\") " pod="openstack/placement-db-sync-69lhb" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.807880 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4663e6c0-d0e2-49f9-a457-8bc02fefa635-logs\") pod \"placement-db-sync-69lhb\" (UID: \"4663e6c0-d0e2-49f9-a457-8bc02fefa635\") " pod="openstack/placement-db-sync-69lhb" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.808108 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-rp67d\" (UID: \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.808139 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8x4h9\" (UniqueName: 
\"kubernetes.io/projected/4663e6c0-d0e2-49f9-a457-8bc02fefa635-kube-api-access-8x4h9\") pod \"placement-db-sync-69lhb\" (UID: \"4663e6c0-d0e2-49f9-a457-8bc02fefa635\") " pod="openstack/placement-db-sync-69lhb" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.808157 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-rp67d\" (UID: \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.808182 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-rp67d\" (UID: \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.809648 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4663e6c0-d0e2-49f9-a457-8bc02fefa635-logs\") pod \"placement-db-sync-69lhb\" (UID: \"4663e6c0-d0e2-49f9-a457-8bc02fefa635\") " pod="openstack/placement-db-sync-69lhb" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.810438 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.812788 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-q9f5f" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.819387 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4663e6c0-d0e2-49f9-a457-8bc02fefa635-config-data\") pod \"placement-db-sync-69lhb\" (UID: \"4663e6c0-d0e2-49f9-a457-8bc02fefa635\") " pod="openstack/placement-db-sync-69lhb" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.820170 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4663e6c0-d0e2-49f9-a457-8bc02fefa635-combined-ca-bundle\") pod \"placement-db-sync-69lhb\" (UID: \"4663e6c0-d0e2-49f9-a457-8bc02fefa635\") " pod="openstack/placement-db-sync-69lhb" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.822987 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4663e6c0-d0e2-49f9-a457-8bc02fefa635-scripts\") pod \"placement-db-sync-69lhb\" (UID: \"4663e6c0-d0e2-49f9-a457-8bc02fefa635\") " pod="openstack/placement-db-sync-69lhb" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.838256 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8x4h9\" (UniqueName: \"kubernetes.io/projected/4663e6c0-d0e2-49f9-a457-8bc02fefa635-kube-api-access-8x4h9\") pod \"placement-db-sync-69lhb\" (UID: \"4663e6c0-d0e2-49f9-a457-8bc02fefa635\") " pod="openstack/placement-db-sync-69lhb" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.850832 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.863987 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.866617 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.866951 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.885644 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.911124 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/953831ef-1753-4bd9-896d-a769b212b0ae-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.911201 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/190b54d9-49f3-4d1a-aac5-ab680fafa605-scripts\") pod \"horizon-78dbfb6845-276zt\" (UID: \"190b54d9-49f3-4d1a-aac5-ab680fafa605\") " pod="openstack/horizon-78dbfb6845-276zt" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.911242 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/953831ef-1753-4bd9-896d-a769b212b0ae-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.911313 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5zt7\" (UniqueName: \"kubernetes.io/projected/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-kube-api-access-g5zt7\") pod \"dnsmasq-dns-56df8fb6b7-rp67d\" (UID: \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.911369 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7mp5\" (UniqueName: \"kubernetes.io/projected/953831ef-1753-4bd9-896d-a769b212b0ae-kube-api-access-b7mp5\") pod \"glance-default-external-api-0\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.911429 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/190b54d9-49f3-4d1a-aac5-ab680fafa605-horizon-secret-key\") pod \"horizon-78dbfb6845-276zt\" (UID: \"190b54d9-49f3-4d1a-aac5-ab680fafa605\") " pod="openstack/horizon-78dbfb6845-276zt" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.911509 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/953831ef-1753-4bd9-896d-a769b212b0ae-scripts\") pod \"glance-default-external-api-0\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.911550 4899 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/953831ef-1753-4bd9-896d-a769b212b0ae-config-data\") pod \"glance-default-external-api-0\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.911583 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/953831ef-1753-4bd9-896d-a769b212b0ae-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.911629 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-rp67d\" (UID: \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.911664 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/190b54d9-49f3-4d1a-aac5-ab680fafa605-logs\") pod \"horizon-78dbfb6845-276zt\" (UID: \"190b54d9-49f3-4d1a-aac5-ab680fafa605\") " pod="openstack/horizon-78dbfb6845-276zt" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.911709 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-rp67d\" (UID: \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.911731 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/953831ef-1753-4bd9-896d-a769b212b0ae-logs\") pod \"glance-default-external-api-0\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.912360 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-rp67d\" (UID: \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.912850 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-rp67d\" (UID: \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.912872 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-rp67d\" (UID: \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.912991 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.913021 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-config\") pod \"dnsmasq-dns-56df8fb6b7-rp67d\" (UID: \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.913069 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/190b54d9-49f3-4d1a-aac5-ab680fafa605-config-data\") pod \"horizon-78dbfb6845-276zt\" (UID: \"190b54d9-49f3-4d1a-aac5-ab680fafa605\") " pod="openstack/horizon-78dbfb6845-276zt" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.913119 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ps45n\" (UniqueName: \"kubernetes.io/projected/190b54d9-49f3-4d1a-aac5-ab680fafa605-kube-api-access-ps45n\") pod \"horizon-78dbfb6845-276zt\" (UID: \"190b54d9-49f3-4d1a-aac5-ab680fafa605\") " pod="openstack/horizon-78dbfb6845-276zt" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.913160 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-rp67d\" (UID: \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.913790 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-rp67d\" (UID: \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.916073 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-config\") pod \"dnsmasq-dns-56df8fb6b7-rp67d\" (UID: \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" Oct 03 08:56:10 crc kubenswrapper[4899]: I1003 08:56:10.976388 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-rp67d\" (UID: \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.004022 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5zt7\" (UniqueName: \"kubernetes.io/projected/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-kube-api-access-g5zt7\") pod \"dnsmasq-dns-56df8fb6b7-rp67d\" (UID: \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\") " pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.026783 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ps45n\" (UniqueName: 
\"kubernetes.io/projected/190b54d9-49f3-4d1a-aac5-ab680fafa605-kube-api-access-ps45n\") pod \"horizon-78dbfb6845-276zt\" (UID: \"190b54d9-49f3-4d1a-aac5-ab680fafa605\") " pod="openstack/horizon-78dbfb6845-276zt" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.026942 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/953831ef-1753-4bd9-896d-a769b212b0ae-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.026978 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/190b54d9-49f3-4d1a-aac5-ab680fafa605-scripts\") pod \"horizon-78dbfb6845-276zt\" (UID: \"190b54d9-49f3-4d1a-aac5-ab680fafa605\") " pod="openstack/horizon-78dbfb6845-276zt" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.027018 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/953831ef-1753-4bd9-896d-a769b212b0ae-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.027064 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ee3630d8-5170-449b-aefc-4c5443da6f9f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.027131 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7mp5\" (UniqueName: \"kubernetes.io/projected/953831ef-1753-4bd9-896d-a769b212b0ae-kube-api-access-b7mp5\") pod \"glance-default-external-api-0\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.027163 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.027180 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzts7\" (UniqueName: \"kubernetes.io/projected/ee3630d8-5170-449b-aefc-4c5443da6f9f-kube-api-access-hzts7\") pod \"glance-default-internal-api-0\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.027245 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee3630d8-5170-449b-aefc-4c5443da6f9f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.027268 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: 
\"kubernetes.io/secret/190b54d9-49f3-4d1a-aac5-ab680fafa605-horizon-secret-key\") pod \"horizon-78dbfb6845-276zt\" (UID: \"190b54d9-49f3-4d1a-aac5-ab680fafa605\") " pod="openstack/horizon-78dbfb6845-276zt" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.027305 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/953831ef-1753-4bd9-896d-a769b212b0ae-scripts\") pod \"glance-default-external-api-0\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.027323 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/953831ef-1753-4bd9-896d-a769b212b0ae-config-data\") pod \"glance-default-external-api-0\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.027341 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ee3630d8-5170-449b-aefc-4c5443da6f9f-logs\") pod \"glance-default-internal-api-0\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.027357 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee3630d8-5170-449b-aefc-4c5443da6f9f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.027380 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/953831ef-1753-4bd9-896d-a769b212b0ae-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.027427 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/190b54d9-49f3-4d1a-aac5-ab680fafa605-logs\") pod \"horizon-78dbfb6845-276zt\" (UID: \"190b54d9-49f3-4d1a-aac5-ab680fafa605\") " pod="openstack/horizon-78dbfb6845-276zt" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.027490 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/953831ef-1753-4bd9-896d-a769b212b0ae-logs\") pod \"glance-default-external-api-0\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.027516 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee3630d8-5170-449b-aefc-4c5443da6f9f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.027556 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" 
(UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.027609 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/190b54d9-49f3-4d1a-aac5-ab680fafa605-config-data\") pod \"horizon-78dbfb6845-276zt\" (UID: \"190b54d9-49f3-4d1a-aac5-ab680fafa605\") " pod="openstack/horizon-78dbfb6845-276zt" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.027631 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee3630d8-5170-449b-aefc-4c5443da6f9f-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.031376 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/190b54d9-49f3-4d1a-aac5-ab680fafa605-scripts\") pod \"horizon-78dbfb6845-276zt\" (UID: \"190b54d9-49f3-4d1a-aac5-ab680fafa605\") " pod="openstack/horizon-78dbfb6845-276zt" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.035713 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/953831ef-1753-4bd9-896d-a769b212b0ae-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.037347 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-jf5f9"] Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.040367 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/190b54d9-49f3-4d1a-aac5-ab680fafa605-horizon-secret-key\") pod \"horizon-78dbfb6845-276zt\" (UID: \"190b54d9-49f3-4d1a-aac5-ab680fafa605\") " pod="openstack/horizon-78dbfb6845-276zt" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.041628 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/953831ef-1753-4bd9-896d-a769b212b0ae-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.043325 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/953831ef-1753-4bd9-896d-a769b212b0ae-logs\") pod \"glance-default-external-api-0\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.043351 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/190b54d9-49f3-4d1a-aac5-ab680fafa605-logs\") pod \"horizon-78dbfb6845-276zt\" (UID: \"190b54d9-49f3-4d1a-aac5-ab680fafa605\") " pod="openstack/horizon-78dbfb6845-276zt" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.048139 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-jf5f9" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.048527 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/953831ef-1753-4bd9-896d-a769b212b0ae-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.049413 4899 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-external-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.051020 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/190b54d9-49f3-4d1a-aac5-ab680fafa605-config-data\") pod \"horizon-78dbfb6845-276zt\" (UID: \"190b54d9-49f3-4d1a-aac5-ab680fafa605\") " pod="openstack/horizon-78dbfb6845-276zt" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.057449 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/953831ef-1753-4bd9-896d-a769b212b0ae-config-data\") pod \"glance-default-external-api-0\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.061309 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-rh6dp"] Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.062929 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-rh6dp" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.066470 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ps45n\" (UniqueName: \"kubernetes.io/projected/190b54d9-49f3-4d1a-aac5-ab680fafa605-kube-api-access-ps45n\") pod \"horizon-78dbfb6845-276zt\" (UID: \"190b54d9-49f3-4d1a-aac5-ab680fafa605\") " pod="openstack/horizon-78dbfb6845-276zt" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.070289 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.080289 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7mp5\" (UniqueName: \"kubernetes.io/projected/953831ef-1753-4bd9-896d-a769b212b0ae-kube-api-access-b7mp5\") pod \"glance-default-external-api-0\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.080411 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.080663 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-4flr9" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.080817 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-ltqhc" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.081023 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.084399 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/953831ef-1753-4bd9-896d-a769b212b0ae-scripts\") pod \"glance-default-external-api-0\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.099297 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-jf5f9"] Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.129084 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee3630d8-5170-449b-aefc-4c5443da6f9f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.129160 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c2decdc4-50c4-4370-8551-fe73d6918bcd-config\") pod \"neutron-db-sync-rh6dp\" (UID: \"c2decdc4-50c4-4370-8551-fe73d6918bcd\") " pod="openstack/neutron-db-sync-rh6dp" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.129229 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee3630d8-5170-449b-aefc-4c5443da6f9f-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.129300 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/5d0954fe-c339-493b-a2ca-5d30b54bc603-combined-ca-bundle\") pod \"barbican-db-sync-jf5f9\" (UID: \"5d0954fe-c339-493b-a2ca-5d30b54bc603\") " pod="openstack/barbican-db-sync-jf5f9" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.129337 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5d0954fe-c339-493b-a2ca-5d30b54bc603-db-sync-config-data\") pod \"barbican-db-sync-jf5f9\" (UID: \"5d0954fe-c339-493b-a2ca-5d30b54bc603\") " pod="openstack/barbican-db-sync-jf5f9" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.129395 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ee3630d8-5170-449b-aefc-4c5443da6f9f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.129411 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2decdc4-50c4-4370-8551-fe73d6918bcd-combined-ca-bundle\") pod \"neutron-db-sync-rh6dp\" (UID: \"c2decdc4-50c4-4370-8551-fe73d6918bcd\") " pod="openstack/neutron-db-sync-rh6dp" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.129426 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grmzz\" (UniqueName: \"kubernetes.io/projected/5d0954fe-c339-493b-a2ca-5d30b54bc603-kube-api-access-grmzz\") pod \"barbican-db-sync-jf5f9\" (UID: \"5d0954fe-c339-493b-a2ca-5d30b54bc603\") " pod="openstack/barbican-db-sync-jf5f9" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.129484 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.129499 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzts7\" (UniqueName: \"kubernetes.io/projected/ee3630d8-5170-449b-aefc-4c5443da6f9f-kube-api-access-hzts7\") pod \"glance-default-internal-api-0\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.129533 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8gwb\" (UniqueName: \"kubernetes.io/projected/c2decdc4-50c4-4370-8551-fe73d6918bcd-kube-api-access-w8gwb\") pod \"neutron-db-sync-rh6dp\" (UID: \"c2decdc4-50c4-4370-8551-fe73d6918bcd\") " pod="openstack/neutron-db-sync-rh6dp" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.129553 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee3630d8-5170-449b-aefc-4c5443da6f9f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.129571 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/ee3630d8-5170-449b-aefc-4c5443da6f9f-logs\") pod \"glance-default-internal-api-0\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.129585 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee3630d8-5170-449b-aefc-4c5443da6f9f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.133355 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-rh6dp"] Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.135703 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ee3630d8-5170-449b-aefc-4c5443da6f9f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.136328 4899 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.141104 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ee3630d8-5170-449b-aefc-4c5443da6f9f-logs\") pod \"glance-default-internal-api-0\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.142172 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee3630d8-5170-449b-aefc-4c5443da6f9f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.143062 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-69lhb" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.147840 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee3630d8-5170-449b-aefc-4c5443da6f9f-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.149480 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee3630d8-5170-449b-aefc-4c5443da6f9f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.151197 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee3630d8-5170-449b-aefc-4c5443da6f9f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.155150 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.163712 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" Oct 03 08:56:11 crc kubenswrapper[4899]: I1003 08:56:11.174358 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:11.199105 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzts7\" (UniqueName: \"kubernetes.io/projected/ee3630d8-5170-449b-aefc-4c5443da6f9f-kube-api-access-hzts7\") pod \"glance-default-internal-api-0\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:11.243464 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:11.255532 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-78dbfb6845-276zt" Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:11.259235 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:11.274049 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5d0954fe-c339-493b-a2ca-5d30b54bc603-db-sync-config-data\") pod \"barbican-db-sync-jf5f9\" (UID: \"5d0954fe-c339-493b-a2ca-5d30b54bc603\") " pod="openstack/barbican-db-sync-jf5f9" Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:11.274119 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2decdc4-50c4-4370-8551-fe73d6918bcd-combined-ca-bundle\") pod \"neutron-db-sync-rh6dp\" (UID: \"c2decdc4-50c4-4370-8551-fe73d6918bcd\") " pod="openstack/neutron-db-sync-rh6dp" Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:11.274136 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grmzz\" (UniqueName: \"kubernetes.io/projected/5d0954fe-c339-493b-a2ca-5d30b54bc603-kube-api-access-grmzz\") pod \"barbican-db-sync-jf5f9\" (UID: \"5d0954fe-c339-493b-a2ca-5d30b54bc603\") " pod="openstack/barbican-db-sync-jf5f9" Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:11.274185 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8gwb\" (UniqueName: \"kubernetes.io/projected/c2decdc4-50c4-4370-8551-fe73d6918bcd-kube-api-access-w8gwb\") pod \"neutron-db-sync-rh6dp\" (UID: \"c2decdc4-50c4-4370-8551-fe73d6918bcd\") " pod="openstack/neutron-db-sync-rh6dp" Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:11.274257 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c2decdc4-50c4-4370-8551-fe73d6918bcd-config\") pod \"neutron-db-sync-rh6dp\" (UID: \"c2decdc4-50c4-4370-8551-fe73d6918bcd\") " pod="openstack/neutron-db-sync-rh6dp" Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:11.274340 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d0954fe-c339-493b-a2ca-5d30b54bc603-combined-ca-bundle\") pod \"barbican-db-sync-jf5f9\" (UID: \"5d0954fe-c339-493b-a2ca-5d30b54bc603\") " pod="openstack/barbican-db-sync-jf5f9" Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:11.282560 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d0954fe-c339-493b-a2ca-5d30b54bc603-combined-ca-bundle\") pod \"barbican-db-sync-jf5f9\" (UID: \"5d0954fe-c339-493b-a2ca-5d30b54bc603\") " pod="openstack/barbican-db-sync-jf5f9" Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:11.284049 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5d0954fe-c339-493b-a2ca-5d30b54bc603-db-sync-config-data\") pod \"barbican-db-sync-jf5f9\" (UID: \"5d0954fe-c339-493b-a2ca-5d30b54bc603\") " pod="openstack/barbican-db-sync-jf5f9" Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:11.312180 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2decdc4-50c4-4370-8551-fe73d6918bcd-combined-ca-bundle\") pod \"neutron-db-sync-rh6dp\" (UID: \"c2decdc4-50c4-4370-8551-fe73d6918bcd\") " pod="openstack/neutron-db-sync-rh6dp" Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:11.338856 4899 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/c2decdc4-50c4-4370-8551-fe73d6918bcd-config\") pod \"neutron-db-sync-rh6dp\" (UID: \"c2decdc4-50c4-4370-8551-fe73d6918bcd\") " pod="openstack/neutron-db-sync-rh6dp" Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:11.357811 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8gwb\" (UniqueName: \"kubernetes.io/projected/c2decdc4-50c4-4370-8551-fe73d6918bcd-kube-api-access-w8gwb\") pod \"neutron-db-sync-rh6dp\" (UID: \"c2decdc4-50c4-4370-8551-fe73d6918bcd\") " pod="openstack/neutron-db-sync-rh6dp" Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:11.361428 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grmzz\" (UniqueName: \"kubernetes.io/projected/5d0954fe-c339-493b-a2ca-5d30b54bc603-kube-api-access-grmzz\") pod \"barbican-db-sync-jf5f9\" (UID: \"5d0954fe-c339-493b-a2ca-5d30b54bc603\") " pod="openstack/barbican-db-sync-jf5f9" Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:11.404121 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-jf5f9" Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:11.434269 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-rh6dp" Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:11.434667 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-58b885464f-hm46m"] Oct 03 08:56:13 crc kubenswrapper[4899]: W1003 08:56:11.446702 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc45fd4cf_daa8_4226_bb75_c55604b5ccb6.slice/crio-b633321415ebbc15bbbc16b1d2b1a0cf6af529257f4be1c36337cc7dae1f9ff3 WatchSource:0}: Error finding container b633321415ebbc15bbbc16b1d2b1a0cf6af529257f4be1c36337cc7dae1f9ff3: Status 404 returned error can't find the container with id b633321415ebbc15bbbc16b1d2b1a0cf6af529257f4be1c36337cc7dae1f9ff3 Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:11.464622 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-fp7gs"] Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:11.480078 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-9k4zg"] Oct 03 08:56:13 crc kubenswrapper[4899]: W1003 08:56:11.623167 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8b92f0ed_4578_4bbd_b7f3_abbe7fbe84a3.slice/crio-b76bd7a5ffe4fa91d8b03ce5c14d940a2596fe048e23554972a3677f1417df0c WatchSource:0}: Error finding container b76bd7a5ffe4fa91d8b03ce5c14d940a2596fe048e23554972a3677f1417df0c: Status 404 returned error can't find the container with id b76bd7a5ffe4fa91d8b03ce5c14d940a2596fe048e23554972a3677f1417df0c Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:12.054256 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-fp7gs" event={"ID":"00702fc3-6e90-4171-85c6-03f71bb45256","Type":"ContainerStarted","Data":"0ab4a7f15c37af4f84440d62001a83056f515b5cac4f206b53956cc00644e50e"} Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:12.055791 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-58b885464f-hm46m" 
event={"ID":"c45fd4cf-daa8-4226-bb75-c55604b5ccb6","Type":"ContainerStarted","Data":"b633321415ebbc15bbbc16b1d2b1a0cf6af529257f4be1c36337cc7dae1f9ff3"} Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:12.057222 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bbf5cc879-9k4zg" event={"ID":"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3","Type":"ContainerStarted","Data":"b76bd7a5ffe4fa91d8b03ce5c14d940a2596fe048e23554972a3677f1417df0c"} Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:12.057386 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" podUID="1f3a53b3-e907-4cff-ad72-d6a217bea837" containerName="dnsmasq-dns" containerID="cri-o://740c83954c0bb7005af958c1be9b3f097c78c2d38572ce378efa8543f8bc92c2" gracePeriod=10 Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:12.198458 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:12.198513 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:12.198561 4899 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:12.199304 4899 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"56a8af8272c5f0a0da6c3696ab9575cf28a2d27b7f650888f1dd37ac24669078"} pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:12.199364 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" containerID="cri-o://56a8af8272c5f0a0da6c3696ab9575cf28a2d27b7f650888f1dd37ac24669078" gracePeriod=600 Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:13.071496 4899 generic.go:334] "Generic (PLEG): container finished" podID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerID="56a8af8272c5f0a0da6c3696ab9575cf28a2d27b7f650888f1dd37ac24669078" exitCode=0 Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:13.071561 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerDied","Data":"56a8af8272c5f0a0da6c3696ab9575cf28a2d27b7f650888f1dd37ac24669078"} Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:13.071598 4899 scope.go:117] "RemoveContainer" containerID="0b65c263cdff33924e77f87b039bd41bb8cbe3269904b715ae19db8a65d88bf8" Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:13.074380 4899 generic.go:334] "Generic (PLEG): container finished" podID="1f3a53b3-e907-4cff-ad72-d6a217bea837" 
containerID="740c83954c0bb7005af958c1be9b3f097c78c2d38572ce378efa8543f8bc92c2" exitCode=0 Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:13.074425 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" event={"ID":"1f3a53b3-e907-4cff-ad72-d6a217bea837","Type":"ContainerDied","Data":"740c83954c0bb7005af958c1be9b3f097c78c2d38572ce378efa8543f8bc92c2"} Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:13.758701 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:13.858147 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-58b885464f-hm46m"] Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:13.906260 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5874b7f477-6z4bj"] Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:13.907825 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5874b7f477-6z4bj" Oct 03 08:56:13 crc kubenswrapper[4899]: I1003 08:56:13.969999 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:13.998706 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5874b7f477-6z4bj"] Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.033569 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3dd81aae-6922-43f5-83a6-c81a06bfedea-horizon-secret-key\") pod \"horizon-5874b7f477-6z4bj\" (UID: \"3dd81aae-6922-43f5-83a6-c81a06bfedea\") " pod="openstack/horizon-5874b7f477-6z4bj" Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.033650 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3dd81aae-6922-43f5-83a6-c81a06bfedea-scripts\") pod \"horizon-5874b7f477-6z4bj\" (UID: \"3dd81aae-6922-43f5-83a6-c81a06bfedea\") " pod="openstack/horizon-5874b7f477-6z4bj" Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.033710 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3dd81aae-6922-43f5-83a6-c81a06bfedea-config-data\") pod \"horizon-5874b7f477-6z4bj\" (UID: \"3dd81aae-6922-43f5-83a6-c81a06bfedea\") " pod="openstack/horizon-5874b7f477-6z4bj" Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.033752 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qq5f2\" (UniqueName: \"kubernetes.io/projected/3dd81aae-6922-43f5-83a6-c81a06bfedea-kube-api-access-qq5f2\") pod \"horizon-5874b7f477-6z4bj\" (UID: \"3dd81aae-6922-43f5-83a6-c81a06bfedea\") " pod="openstack/horizon-5874b7f477-6z4bj" Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.033847 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3dd81aae-6922-43f5-83a6-c81a06bfedea-logs\") pod \"horizon-5874b7f477-6z4bj\" (UID: \"3dd81aae-6922-43f5-83a6-c81a06bfedea\") " pod="openstack/horizon-5874b7f477-6z4bj" Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.112255 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" 
event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerStarted","Data":"890db85d4fbdd365302c6e5ed34afe49d3195769246d9b9d458ffa94cd16c5f8"} Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.117726 4899 generic.go:334] "Generic (PLEG): container finished" podID="8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3" containerID="38b6a02fd76450a4794a552b9b28a8f2a069a7ebfee1bd68ce47405ca12615d8" exitCode=0 Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.117805 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bbf5cc879-9k4zg" event={"ID":"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3","Type":"ContainerDied","Data":"38b6a02fd76450a4794a552b9b28a8f2a069a7ebfee1bd68ce47405ca12615d8"} Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.120541 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-fp7gs" event={"ID":"00702fc3-6e90-4171-85c6-03f71bb45256","Type":"ContainerStarted","Data":"ffa3895e7d7efddf5d57593af3042344fd7f1f73bbc572ece7d300f48722c796"} Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.135155 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3dd81aae-6922-43f5-83a6-c81a06bfedea-config-data\") pod \"horizon-5874b7f477-6z4bj\" (UID: \"3dd81aae-6922-43f5-83a6-c81a06bfedea\") " pod="openstack/horizon-5874b7f477-6z4bj" Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.135231 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qq5f2\" (UniqueName: \"kubernetes.io/projected/3dd81aae-6922-43f5-83a6-c81a06bfedea-kube-api-access-qq5f2\") pod \"horizon-5874b7f477-6z4bj\" (UID: \"3dd81aae-6922-43f5-83a6-c81a06bfedea\") " pod="openstack/horizon-5874b7f477-6z4bj" Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.135293 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3dd81aae-6922-43f5-83a6-c81a06bfedea-logs\") pod \"horizon-5874b7f477-6z4bj\" (UID: \"3dd81aae-6922-43f5-83a6-c81a06bfedea\") " pod="openstack/horizon-5874b7f477-6z4bj" Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.136698 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3dd81aae-6922-43f5-83a6-c81a06bfedea-logs\") pod \"horizon-5874b7f477-6z4bj\" (UID: \"3dd81aae-6922-43f5-83a6-c81a06bfedea\") " pod="openstack/horizon-5874b7f477-6z4bj" Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.137025 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3dd81aae-6922-43f5-83a6-c81a06bfedea-config-data\") pod \"horizon-5874b7f477-6z4bj\" (UID: \"3dd81aae-6922-43f5-83a6-c81a06bfedea\") " pod="openstack/horizon-5874b7f477-6z4bj" Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.139273 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3dd81aae-6922-43f5-83a6-c81a06bfedea-horizon-secret-key\") pod \"horizon-5874b7f477-6z4bj\" (UID: \"3dd81aae-6922-43f5-83a6-c81a06bfedea\") " pod="openstack/horizon-5874b7f477-6z4bj" Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.139437 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3dd81aae-6922-43f5-83a6-c81a06bfedea-scripts\") pod \"horizon-5874b7f477-6z4bj\" (UID: 
\"3dd81aae-6922-43f5-83a6-c81a06bfedea\") " pod="openstack/horizon-5874b7f477-6z4bj" Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.140225 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3dd81aae-6922-43f5-83a6-c81a06bfedea-scripts\") pod \"horizon-5874b7f477-6z4bj\" (UID: \"3dd81aae-6922-43f5-83a6-c81a06bfedea\") " pod="openstack/horizon-5874b7f477-6z4bj" Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.169426 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qq5f2\" (UniqueName: \"kubernetes.io/projected/3dd81aae-6922-43f5-83a6-c81a06bfedea-kube-api-access-qq5f2\") pod \"horizon-5874b7f477-6z4bj\" (UID: \"3dd81aae-6922-43f5-83a6-c81a06bfedea\") " pod="openstack/horizon-5874b7f477-6z4bj" Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.174624 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.175069 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3dd81aae-6922-43f5-83a6-c81a06bfedea-horizon-secret-key\") pod \"horizon-5874b7f477-6z4bj\" (UID: \"3dd81aae-6922-43f5-83a6-c81a06bfedea\") " pod="openstack/horizon-5874b7f477-6z4bj" Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.180692 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-fp7gs" podStartSLOduration=4.18067242 podStartE2EDuration="4.18067242s" podCreationTimestamp="2025-10-03 08:56:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:56:14.163487099 +0000 UTC m=+948.270972042" watchObservedRunningTime="2025-10-03 08:56:14.18067242 +0000 UTC m=+948.288157373" Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.187958 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.247399 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5874b7f477-6z4bj" Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.715471 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.746031 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bbf5cc879-9k4zg" Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.755006 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-ovsdbserver-sb\") pod \"1f3a53b3-e907-4cff-ad72-d6a217bea837\" (UID: \"1f3a53b3-e907-4cff-ad72-d6a217bea837\") " Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.755202 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-dns-svc\") pod \"1f3a53b3-e907-4cff-ad72-d6a217bea837\" (UID: \"1f3a53b3-e907-4cff-ad72-d6a217bea837\") " Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.755242 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-config\") pod \"1f3a53b3-e907-4cff-ad72-d6a217bea837\" (UID: \"1f3a53b3-e907-4cff-ad72-d6a217bea837\") " Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.755348 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-ovsdbserver-nb\") pod \"1f3a53b3-e907-4cff-ad72-d6a217bea837\" (UID: \"1f3a53b3-e907-4cff-ad72-d6a217bea837\") " Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.755402 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4dfvt\" (UniqueName: \"kubernetes.io/projected/1f3a53b3-e907-4cff-ad72-d6a217bea837-kube-api-access-4dfvt\") pod \"1f3a53b3-e907-4cff-ad72-d6a217bea837\" (UID: \"1f3a53b3-e907-4cff-ad72-d6a217bea837\") " Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.755425 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-dns-swift-storage-0\") pod \"1f3a53b3-e907-4cff-ad72-d6a217bea837\" (UID: \"1f3a53b3-e907-4cff-ad72-d6a217bea837\") " Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.786213 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f3a53b3-e907-4cff-ad72-d6a217bea837-kube-api-access-4dfvt" (OuterVolumeSpecName: "kube-api-access-4dfvt") pod "1f3a53b3-e907-4cff-ad72-d6a217bea837" (UID: "1f3a53b3-e907-4cff-ad72-d6a217bea837"). InnerVolumeSpecName "kube-api-access-4dfvt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.795686 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-78dbfb6845-276zt"] Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.827414 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-q9f5f"] Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.857256 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-dns-svc\") pod \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\" (UID: \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\") " Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.857336 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-ovsdbserver-sb\") pod \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\" (UID: \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\") " Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.857411 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-dns-swift-storage-0\") pod \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\" (UID: \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\") " Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.857496 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-ovsdbserver-nb\") pod \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\" (UID: \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\") " Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.857596 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mpzct\" (UniqueName: \"kubernetes.io/projected/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-kube-api-access-mpzct\") pod \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\" (UID: \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\") " Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.857622 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-config\") pod \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\" (UID: \"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3\") " Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.868426 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4dfvt\" (UniqueName: \"kubernetes.io/projected/1f3a53b3-e907-4cff-ad72-d6a217bea837-kube-api-access-4dfvt\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.874222 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-rh6dp"] Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.893112 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-jf5f9"] Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.900921 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-69lhb"] Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.913067 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-kube-api-access-mpzct" (OuterVolumeSpecName: "kube-api-access-mpzct") pod 
"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3" (UID: "8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3"). InnerVolumeSpecName "kube-api-access-mpzct". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.915519 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-rp67d"] Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.933322 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5874b7f477-6z4bj"] Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.933632 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3" (UID: "8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:56:14 crc kubenswrapper[4899]: W1003 08:56:14.940231 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda332aa78_c64e_46f2_b2a0_6cf8be20fe4c.slice/crio-d532184f0bcfa04e8e024dd7b40287fbf8d19683beac56522136b3b2d65ac53c WatchSource:0}: Error finding container d532184f0bcfa04e8e024dd7b40287fbf8d19683beac56522136b3b2d65ac53c: Status 404 returned error can't find the container with id d532184f0bcfa04e8e024dd7b40287fbf8d19683beac56522136b3b2d65ac53c Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.969880 4899 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.969926 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mpzct\" (UniqueName: \"kubernetes.io/projected/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-kube-api-access-mpzct\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.971760 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "1f3a53b3-e907-4cff-ad72-d6a217bea837" (UID: "1f3a53b3-e907-4cff-ad72-d6a217bea837"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.981671 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.982453 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3" (UID: "8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.992069 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1f3a53b3-e907-4cff-ad72-d6a217bea837" (UID: "1f3a53b3-e907-4cff-ad72-d6a217bea837"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:56:14 crc kubenswrapper[4899]: I1003 08:56:14.994794 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1f3a53b3-e907-4cff-ad72-d6a217bea837" (UID: "1f3a53b3-e907-4cff-ad72-d6a217bea837"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.011706 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-config" (OuterVolumeSpecName: "config") pod "1f3a53b3-e907-4cff-ad72-d6a217bea837" (UID: "1f3a53b3-e907-4cff-ad72-d6a217bea837"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.016685 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3" (UID: "8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.026359 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-config" (OuterVolumeSpecName: "config") pod "8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3" (UID: "8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.030672 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1f3a53b3-e907-4cff-ad72-d6a217bea837" (UID: "1f3a53b3-e907-4cff-ad72-d6a217bea837"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.052480 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3" (UID: "8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.072138 4899 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.072177 4899 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.072189 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.072198 4899 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.072207 4899 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.072215 4899 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.072225 4899 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.072233 4899 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.072241 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f3a53b3-e907-4cff-ad72-d6a217bea837-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.141534 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ee3630d8-5170-449b-aefc-4c5443da6f9f","Type":"ContainerStarted","Data":"0fbf8c8347f0e913f753be318432bf519d71b6b8cb05b5b546275e214e77a38c"} Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.143961 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" event={"ID":"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c","Type":"ContainerStarted","Data":"d532184f0bcfa04e8e024dd7b40287fbf8d19683beac56522136b3b2d65ac53c"} Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.147484 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-69lhb" event={"ID":"4663e6c0-d0e2-49f9-a457-8bc02fefa635","Type":"ContainerStarted","Data":"6601fbcb359a4f9e20e53a965724a9de1be5d88bd9cef04d2ec14058931b60fc"} Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.152680 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-q9f5f" 
event={"ID":"5545cd7a-7849-48e5-91f3-6a3a8d51e665","Type":"ContainerStarted","Data":"2963876c2a63615a3db418901840160a2bc9aad69e0acefd1ef7eacb672982bb"} Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.164816 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-78dbfb6845-276zt" event={"ID":"190b54d9-49f3-4d1a-aac5-ab680fafa605","Type":"ContainerStarted","Data":"ea08bd4bc83ab5bd0010902d9143901cef477c4683b60e483e3cb38762d7eca2"} Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.168457 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8dc2bbb6-802d-4713-935a-353136d48619","Type":"ContainerStarted","Data":"ca2179b0fb2273286072b1d0f3864b9a38705b4a5115caed667d98e24b85da17"} Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.170675 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jf5f9" event={"ID":"5d0954fe-c339-493b-a2ca-5d30b54bc603","Type":"ContainerStarted","Data":"edcfe3cbf300022194c92c2212fdc85974a5e91c2994791b2d711a07b397fc1b"} Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.180807 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bbf5cc879-9k4zg" event={"ID":"8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3","Type":"ContainerDied","Data":"b76bd7a5ffe4fa91d8b03ce5c14d940a2596fe048e23554972a3677f1417df0c"} Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.180858 4899 scope.go:117] "RemoveContainer" containerID="38b6a02fd76450a4794a552b9b28a8f2a069a7ebfee1bd68ce47405ca12615d8" Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.180866 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bbf5cc879-9k4zg" Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.184076 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" event={"ID":"1f3a53b3-e907-4cff-ad72-d6a217bea837","Type":"ContainerDied","Data":"70edf8f1cd4369113f5bbdd43beea53d399e1f1fbfae3381ee1ad5a6f2d9d34e"} Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.184171 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f59b8f679-zrlh8" Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.190037 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5874b7f477-6z4bj" event={"ID":"3dd81aae-6922-43f5-83a6-c81a06bfedea","Type":"ContainerStarted","Data":"ebf5e437698e9610b4663293a9bd24ff18e123e4d04b399fc797d18867f65449"} Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.192770 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-rh6dp" event={"ID":"c2decdc4-50c4-4370-8551-fe73d6918bcd","Type":"ContainerStarted","Data":"6b2f90774db6fb4d91595965f4c892211148e0c367c4cbfdd9a317b88e9fbfb5"} Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.304217 4899 scope.go:117] "RemoveContainer" containerID="740c83954c0bb7005af958c1be9b3f097c78c2d38572ce378efa8543f8bc92c2" Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.381774 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-zrlh8"] Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.388945 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-zrlh8"] Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.406584 4899 scope.go:117] "RemoveContainer" containerID="7b81b25b07738fd729abcee40dca5d2fc3b349d3876dd3ebfe5f5eea9072cf7d" Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.449424 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-9k4zg"] Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.456000 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-9k4zg"] Oct 03 08:56:15 crc kubenswrapper[4899]: I1003 08:56:15.920369 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 08:56:15 crc kubenswrapper[4899]: W1003 08:56:15.960305 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod953831ef_1753_4bd9_896d_a769b212b0ae.slice/crio-c93993bf71c5ad6f03f515f4aefbb3853f156a7607a2e6c1a5d4fc9f621aea37 WatchSource:0}: Error finding container c93993bf71c5ad6f03f515f4aefbb3853f156a7607a2e6c1a5d4fc9f621aea37: Status 404 returned error can't find the container with id c93993bf71c5ad6f03f515f4aefbb3853f156a7607a2e6c1a5d4fc9f621aea37 Oct 03 08:56:16 crc kubenswrapper[4899]: I1003 08:56:16.236642 4899 generic.go:334] "Generic (PLEG): container finished" podID="a332aa78-c64e-46f2-b2a0-6cf8be20fe4c" containerID="a0c0fe18c71a26d072505ed96e8dadb3145c15aa34ac0833ef7ed22eef4db970" exitCode=0 Oct 03 08:56:16 crc kubenswrapper[4899]: I1003 08:56:16.236842 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" event={"ID":"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c","Type":"ContainerDied","Data":"a0c0fe18c71a26d072505ed96e8dadb3145c15aa34ac0833ef7ed22eef4db970"} Oct 03 08:56:16 crc kubenswrapper[4899]: I1003 08:56:16.240846 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"953831ef-1753-4bd9-896d-a769b212b0ae","Type":"ContainerStarted","Data":"c93993bf71c5ad6f03f515f4aefbb3853f156a7607a2e6c1a5d4fc9f621aea37"} Oct 03 08:56:16 crc kubenswrapper[4899]: I1003 08:56:16.293199 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-rh6dp" 
event={"ID":"c2decdc4-50c4-4370-8551-fe73d6918bcd","Type":"ContainerStarted","Data":"12ff563a9c461311dacf4abc2193e35815a9dccd835f6edfe2ea00f82baecc3a"} Oct 03 08:56:16 crc kubenswrapper[4899]: I1003 08:56:16.332558 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ee3630d8-5170-449b-aefc-4c5443da6f9f","Type":"ContainerStarted","Data":"02a1cc6f41530715dbbd649b9906bbb19a5a1d299f5cf9afbbd38020628f5d5c"} Oct 03 08:56:16 crc kubenswrapper[4899]: I1003 08:56:16.334714 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-rh6dp" podStartSLOduration=6.334702463 podStartE2EDuration="6.334702463s" podCreationTimestamp="2025-10-03 08:56:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:56:16.334486615 +0000 UTC m=+950.441971588" watchObservedRunningTime="2025-10-03 08:56:16.334702463 +0000 UTC m=+950.442187416" Oct 03 08:56:16 crc kubenswrapper[4899]: I1003 08:56:16.548665 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f3a53b3-e907-4cff-ad72-d6a217bea837" path="/var/lib/kubelet/pods/1f3a53b3-e907-4cff-ad72-d6a217bea837/volumes" Oct 03 08:56:16 crc kubenswrapper[4899]: I1003 08:56:16.549306 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3" path="/var/lib/kubelet/pods/8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3/volumes" Oct 03 08:56:17 crc kubenswrapper[4899]: I1003 08:56:17.366552 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ee3630d8-5170-449b-aefc-4c5443da6f9f","Type":"ContainerStarted","Data":"27c946d17e54df3f5fd08b577264dac1cc573c6b0d3964ee95759afdecc0e0c3"} Oct 03 08:56:17 crc kubenswrapper[4899]: I1003 08:56:17.366696 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="ee3630d8-5170-449b-aefc-4c5443da6f9f" containerName="glance-log" containerID="cri-o://02a1cc6f41530715dbbd649b9906bbb19a5a1d299f5cf9afbbd38020628f5d5c" gracePeriod=30 Oct 03 08:56:17 crc kubenswrapper[4899]: I1003 08:56:17.366849 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="ee3630d8-5170-449b-aefc-4c5443da6f9f" containerName="glance-httpd" containerID="cri-o://27c946d17e54df3f5fd08b577264dac1cc573c6b0d3964ee95759afdecc0e0c3" gracePeriod=30 Oct 03 08:56:17 crc kubenswrapper[4899]: I1003 08:56:17.370666 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" event={"ID":"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c","Type":"ContainerStarted","Data":"262e63396b2686aaaf8ba54c61164ff36f2bc5c2eee7937e8a9d7011a99db162"} Oct 03 08:56:17 crc kubenswrapper[4899]: I1003 08:56:17.370804 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" Oct 03 08:56:17 crc kubenswrapper[4899]: I1003 08:56:17.374371 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"953831ef-1753-4bd9-896d-a769b212b0ae","Type":"ContainerStarted","Data":"771387d4685d19b31d35bec31c5a63dfb6139e8817b7586b1a9c8af6e7f016be"} Oct 03 08:56:17 crc kubenswrapper[4899]: I1003 08:56:17.406717 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" 
podStartSLOduration=7.406695176 podStartE2EDuration="7.406695176s" podCreationTimestamp="2025-10-03 08:56:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:56:17.397519157 +0000 UTC m=+951.505004110" watchObservedRunningTime="2025-10-03 08:56:17.406695176 +0000 UTC m=+951.514180129" Oct 03 08:56:18 crc kubenswrapper[4899]: I1003 08:56:18.387411 4899 generic.go:334] "Generic (PLEG): container finished" podID="ee3630d8-5170-449b-aefc-4c5443da6f9f" containerID="27c946d17e54df3f5fd08b577264dac1cc573c6b0d3964ee95759afdecc0e0c3" exitCode=143 Oct 03 08:56:18 crc kubenswrapper[4899]: I1003 08:56:18.388151 4899 generic.go:334] "Generic (PLEG): container finished" podID="ee3630d8-5170-449b-aefc-4c5443da6f9f" containerID="02a1cc6f41530715dbbd649b9906bbb19a5a1d299f5cf9afbbd38020628f5d5c" exitCode=143 Oct 03 08:56:18 crc kubenswrapper[4899]: I1003 08:56:18.387822 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ee3630d8-5170-449b-aefc-4c5443da6f9f","Type":"ContainerDied","Data":"27c946d17e54df3f5fd08b577264dac1cc573c6b0d3964ee95759afdecc0e0c3"} Oct 03 08:56:18 crc kubenswrapper[4899]: I1003 08:56:18.388765 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ee3630d8-5170-449b-aefc-4c5443da6f9f","Type":"ContainerDied","Data":"02a1cc6f41530715dbbd649b9906bbb19a5a1d299f5cf9afbbd38020628f5d5c"} Oct 03 08:56:18 crc kubenswrapper[4899]: I1003 08:56:18.395065 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="953831ef-1753-4bd9-896d-a769b212b0ae" containerName="glance-log" containerID="cri-o://771387d4685d19b31d35bec31c5a63dfb6139e8817b7586b1a9c8af6e7f016be" gracePeriod=30 Oct 03 08:56:18 crc kubenswrapper[4899]: I1003 08:56:18.395460 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"953831ef-1753-4bd9-896d-a769b212b0ae","Type":"ContainerStarted","Data":"40d11dbfd51809b83c8e32413f91fa2902f3ecfe50c3404bdbbd259bb15e52ef"} Oct 03 08:56:18 crc kubenswrapper[4899]: I1003 08:56:18.395771 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="953831ef-1753-4bd9-896d-a769b212b0ae" containerName="glance-httpd" containerID="cri-o://40d11dbfd51809b83c8e32413f91fa2902f3ecfe50c3404bdbbd259bb15e52ef" gracePeriod=30 Oct 03 08:56:18 crc kubenswrapper[4899]: I1003 08:56:18.433844 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=8.433157455 podStartE2EDuration="8.433157455s" podCreationTimestamp="2025-10-03 08:56:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:56:18.415686685 +0000 UTC m=+952.523171638" watchObservedRunningTime="2025-10-03 08:56:18.433157455 +0000 UTC m=+952.540642408" Oct 03 08:56:18 crc kubenswrapper[4899]: I1003 08:56:18.437628 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" podStartSLOduration=8.437614415 podStartE2EDuration="8.437614415s" podCreationTimestamp="2025-10-03 08:56:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-10-03 08:56:17.451432515 +0000 UTC m=+951.558917478" watchObservedRunningTime="2025-10-03 08:56:18.437614415 +0000 UTC m=+952.545099368" Oct 03 08:56:18 crc kubenswrapper[4899]: E1003 08:56:18.582103 4899 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod953831ef_1753_4bd9_896d_a769b212b0ae.slice/crio-conmon-40d11dbfd51809b83c8e32413f91fa2902f3ecfe50c3404bdbbd259bb15e52ef.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod953831ef_1753_4bd9_896d_a769b212b0ae.slice/crio-771387d4685d19b31d35bec31c5a63dfb6139e8817b7586b1a9c8af6e7f016be.scope\": RecentStats: unable to find data in memory cache]" Oct 03 08:56:19 crc kubenswrapper[4899]: I1003 08:56:19.425252 4899 generic.go:334] "Generic (PLEG): container finished" podID="953831ef-1753-4bd9-896d-a769b212b0ae" containerID="40d11dbfd51809b83c8e32413f91fa2902f3ecfe50c3404bdbbd259bb15e52ef" exitCode=143 Oct 03 08:56:19 crc kubenswrapper[4899]: I1003 08:56:19.425471 4899 generic.go:334] "Generic (PLEG): container finished" podID="953831ef-1753-4bd9-896d-a769b212b0ae" containerID="771387d4685d19b31d35bec31c5a63dfb6139e8817b7586b1a9c8af6e7f016be" exitCode=143 Oct 03 08:56:19 crc kubenswrapper[4899]: I1003 08:56:19.425353 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"953831ef-1753-4bd9-896d-a769b212b0ae","Type":"ContainerDied","Data":"40d11dbfd51809b83c8e32413f91fa2902f3ecfe50c3404bdbbd259bb15e52ef"} Oct 03 08:56:19 crc kubenswrapper[4899]: I1003 08:56:19.425587 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"953831ef-1753-4bd9-896d-a769b212b0ae","Type":"ContainerDied","Data":"771387d4685d19b31d35bec31c5a63dfb6139e8817b7586b1a9c8af6e7f016be"} Oct 03 08:56:19 crc kubenswrapper[4899]: I1003 08:56:19.428769 4899 generic.go:334] "Generic (PLEG): container finished" podID="00702fc3-6e90-4171-85c6-03f71bb45256" containerID="ffa3895e7d7efddf5d57593af3042344fd7f1f73bbc572ece7d300f48722c796" exitCode=0 Oct 03 08:56:19 crc kubenswrapper[4899]: I1003 08:56:19.428807 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-fp7gs" event={"ID":"00702fc3-6e90-4171-85c6-03f71bb45256","Type":"ContainerDied","Data":"ffa3895e7d7efddf5d57593af3042344fd7f1f73bbc572ece7d300f48722c796"} Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.490288 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-78dbfb6845-276zt"] Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.521966 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5bcb4b4796-x4jmr"] Oct 03 08:56:20 crc kubenswrapper[4899]: E1003 08:56:20.522475 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f3a53b3-e907-4cff-ad72-d6a217bea837" containerName="init" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.522490 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f3a53b3-e907-4cff-ad72-d6a217bea837" containerName="init" Oct 03 08:56:20 crc kubenswrapper[4899]: E1003 08:56:20.522503 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3" containerName="init" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.522509 4899 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3" containerName="init" Oct 03 08:56:20 crc kubenswrapper[4899]: E1003 08:56:20.522525 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f3a53b3-e907-4cff-ad72-d6a217bea837" containerName="dnsmasq-dns" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.522532 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f3a53b3-e907-4cff-ad72-d6a217bea837" containerName="dnsmasq-dns" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.522740 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b92f0ed-4578-4bbd-b7f3-abbe7fbe84a3" containerName="init" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.522752 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f3a53b3-e907-4cff-ad72-d6a217bea837" containerName="dnsmasq-dns" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.523780 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.533924 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.578483 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5bcb4b4796-x4jmr"] Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.602840 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-horizon-secret-key\") pod \"horizon-5bcb4b4796-x4jmr\" (UID: \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\") " pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.603169 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-config-data\") pod \"horizon-5bcb4b4796-x4jmr\" (UID: \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\") " pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.603238 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-scripts\") pod \"horizon-5bcb4b4796-x4jmr\" (UID: \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\") " pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.603330 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-combined-ca-bundle\") pod \"horizon-5bcb4b4796-x4jmr\" (UID: \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\") " pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.603360 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-horizon-tls-certs\") pod \"horizon-5bcb4b4796-x4jmr\" (UID: \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\") " pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.603417 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-logs\") pod \"horizon-5bcb4b4796-x4jmr\" (UID: \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\") " pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.603473 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfb27\" (UniqueName: \"kubernetes.io/projected/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-kube-api-access-kfb27\") pod \"horizon-5bcb4b4796-x4jmr\" (UID: \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\") " pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.608079 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5874b7f477-6z4bj"] Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.684062 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7f5ccd89b4-5dfm2"] Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.701516 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7f5ccd89b4-5dfm2" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.731270 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7f5ccd89b4-5dfm2"] Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.731638 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-logs\") pod \"horizon-5bcb4b4796-x4jmr\" (UID: \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\") " pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.731705 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfb27\" (UniqueName: \"kubernetes.io/projected/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-kube-api-access-kfb27\") pod \"horizon-5bcb4b4796-x4jmr\" (UID: \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\") " pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.731758 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-horizon-secret-key\") pod \"horizon-5bcb4b4796-x4jmr\" (UID: \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\") " pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.731842 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-config-data\") pod \"horizon-5bcb4b4796-x4jmr\" (UID: \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\") " pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.731875 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-scripts\") pod \"horizon-5bcb4b4796-x4jmr\" (UID: \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\") " pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.731935 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-combined-ca-bundle\") pod \"horizon-5bcb4b4796-x4jmr\" (UID: \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\") " pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:56:20 
crc kubenswrapper[4899]: I1003 08:56:20.731955 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-horizon-tls-certs\") pod \"horizon-5bcb4b4796-x4jmr\" (UID: \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\") " pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.732356 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-logs\") pod \"horizon-5bcb4b4796-x4jmr\" (UID: \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\") " pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.738090 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-scripts\") pod \"horizon-5bcb4b4796-x4jmr\" (UID: \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\") " pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.740998 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-config-data\") pod \"horizon-5bcb4b4796-x4jmr\" (UID: \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\") " pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.748339 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-horizon-secret-key\") pod \"horizon-5bcb4b4796-x4jmr\" (UID: \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\") " pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.748866 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfb27\" (UniqueName: \"kubernetes.io/projected/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-kube-api-access-kfb27\") pod \"horizon-5bcb4b4796-x4jmr\" (UID: \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\") " pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.753668 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-combined-ca-bundle\") pod \"horizon-5bcb4b4796-x4jmr\" (UID: \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\") " pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.755912 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-horizon-tls-certs\") pod \"horizon-5bcb4b4796-x4jmr\" (UID: \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\") " pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.833161 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf908711-a33e-40be-b5a0-c82254721d41-combined-ca-bundle\") pod \"horizon-7f5ccd89b4-5dfm2\" (UID: \"bf908711-a33e-40be-b5a0-c82254721d41\") " pod="openstack/horizon-7f5ccd89b4-5dfm2" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.833258 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/bf908711-a33e-40be-b5a0-c82254721d41-horizon-secret-key\") pod \"horizon-7f5ccd89b4-5dfm2\" (UID: \"bf908711-a33e-40be-b5a0-c82254721d41\") " pod="openstack/horizon-7f5ccd89b4-5dfm2" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.833309 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bf908711-a33e-40be-b5a0-c82254721d41-scripts\") pod \"horizon-7f5ccd89b4-5dfm2\" (UID: \"bf908711-a33e-40be-b5a0-c82254721d41\") " pod="openstack/horizon-7f5ccd89b4-5dfm2" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.833380 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7t6k2\" (UniqueName: \"kubernetes.io/projected/bf908711-a33e-40be-b5a0-c82254721d41-kube-api-access-7t6k2\") pod \"horizon-7f5ccd89b4-5dfm2\" (UID: \"bf908711-a33e-40be-b5a0-c82254721d41\") " pod="openstack/horizon-7f5ccd89b4-5dfm2" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.833397 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf908711-a33e-40be-b5a0-c82254721d41-logs\") pod \"horizon-7f5ccd89b4-5dfm2\" (UID: \"bf908711-a33e-40be-b5a0-c82254721d41\") " pod="openstack/horizon-7f5ccd89b4-5dfm2" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.833412 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bf908711-a33e-40be-b5a0-c82254721d41-config-data\") pod \"horizon-7f5ccd89b4-5dfm2\" (UID: \"bf908711-a33e-40be-b5a0-c82254721d41\") " pod="openstack/horizon-7f5ccd89b4-5dfm2" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.833462 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf908711-a33e-40be-b5a0-c82254721d41-horizon-tls-certs\") pod \"horizon-7f5ccd89b4-5dfm2\" (UID: \"bf908711-a33e-40be-b5a0-c82254721d41\") " pod="openstack/horizon-7f5ccd89b4-5dfm2" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.877557 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.934728 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7t6k2\" (UniqueName: \"kubernetes.io/projected/bf908711-a33e-40be-b5a0-c82254721d41-kube-api-access-7t6k2\") pod \"horizon-7f5ccd89b4-5dfm2\" (UID: \"bf908711-a33e-40be-b5a0-c82254721d41\") " pod="openstack/horizon-7f5ccd89b4-5dfm2" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.934774 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf908711-a33e-40be-b5a0-c82254721d41-logs\") pod \"horizon-7f5ccd89b4-5dfm2\" (UID: \"bf908711-a33e-40be-b5a0-c82254721d41\") " pod="openstack/horizon-7f5ccd89b4-5dfm2" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.934797 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bf908711-a33e-40be-b5a0-c82254721d41-config-data\") pod \"horizon-7f5ccd89b4-5dfm2\" (UID: \"bf908711-a33e-40be-b5a0-c82254721d41\") " pod="openstack/horizon-7f5ccd89b4-5dfm2" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.934836 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf908711-a33e-40be-b5a0-c82254721d41-horizon-tls-certs\") pod \"horizon-7f5ccd89b4-5dfm2\" (UID: \"bf908711-a33e-40be-b5a0-c82254721d41\") " pod="openstack/horizon-7f5ccd89b4-5dfm2" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.934870 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf908711-a33e-40be-b5a0-c82254721d41-combined-ca-bundle\") pod \"horizon-7f5ccd89b4-5dfm2\" (UID: \"bf908711-a33e-40be-b5a0-c82254721d41\") " pod="openstack/horizon-7f5ccd89b4-5dfm2" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.934923 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/bf908711-a33e-40be-b5a0-c82254721d41-horizon-secret-key\") pod \"horizon-7f5ccd89b4-5dfm2\" (UID: \"bf908711-a33e-40be-b5a0-c82254721d41\") " pod="openstack/horizon-7f5ccd89b4-5dfm2" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.934955 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bf908711-a33e-40be-b5a0-c82254721d41-scripts\") pod \"horizon-7f5ccd89b4-5dfm2\" (UID: \"bf908711-a33e-40be-b5a0-c82254721d41\") " pod="openstack/horizon-7f5ccd89b4-5dfm2" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.935639 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bf908711-a33e-40be-b5a0-c82254721d41-scripts\") pod \"horizon-7f5ccd89b4-5dfm2\" (UID: \"bf908711-a33e-40be-b5a0-c82254721d41\") " pod="openstack/horizon-7f5ccd89b4-5dfm2" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.936126 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf908711-a33e-40be-b5a0-c82254721d41-logs\") pod \"horizon-7f5ccd89b4-5dfm2\" (UID: \"bf908711-a33e-40be-b5a0-c82254721d41\") " pod="openstack/horizon-7f5ccd89b4-5dfm2" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.937136 4899 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bf908711-a33e-40be-b5a0-c82254721d41-config-data\") pod \"horizon-7f5ccd89b4-5dfm2\" (UID: \"bf908711-a33e-40be-b5a0-c82254721d41\") " pod="openstack/horizon-7f5ccd89b4-5dfm2" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.940963 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/bf908711-a33e-40be-b5a0-c82254721d41-horizon-secret-key\") pod \"horizon-7f5ccd89b4-5dfm2\" (UID: \"bf908711-a33e-40be-b5a0-c82254721d41\") " pod="openstack/horizon-7f5ccd89b4-5dfm2" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.941073 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf908711-a33e-40be-b5a0-c82254721d41-horizon-tls-certs\") pod \"horizon-7f5ccd89b4-5dfm2\" (UID: \"bf908711-a33e-40be-b5a0-c82254721d41\") " pod="openstack/horizon-7f5ccd89b4-5dfm2" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.949248 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf908711-a33e-40be-b5a0-c82254721d41-combined-ca-bundle\") pod \"horizon-7f5ccd89b4-5dfm2\" (UID: \"bf908711-a33e-40be-b5a0-c82254721d41\") " pod="openstack/horizon-7f5ccd89b4-5dfm2" Oct 03 08:56:20 crc kubenswrapper[4899]: I1003 08:56:20.959400 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7t6k2\" (UniqueName: \"kubernetes.io/projected/bf908711-a33e-40be-b5a0-c82254721d41-kube-api-access-7t6k2\") pod \"horizon-7f5ccd89b4-5dfm2\" (UID: \"bf908711-a33e-40be-b5a0-c82254721d41\") " pod="openstack/horizon-7f5ccd89b4-5dfm2" Oct 03 08:56:21 crc kubenswrapper[4899]: I1003 08:56:21.050787 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7f5ccd89b4-5dfm2" Oct 03 08:56:26 crc kubenswrapper[4899]: I1003 08:56:26.165283 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" Oct 03 08:56:26 crc kubenswrapper[4899]: I1003 08:56:26.255043 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-nj5rb"] Oct 03 08:56:26 crc kubenswrapper[4899]: I1003 08:56:26.255583 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" podUID="dbbacbbc-f946-45cc-abdb-0389500e5c19" containerName="dnsmasq-dns" containerID="cri-o://2d57925db20a8b04d78c247124c30c43b01a5af3c3cb897c7a01aa2ea9616655" gracePeriod=10 Oct 03 08:56:26 crc kubenswrapper[4899]: I1003 08:56:26.504965 4899 generic.go:334] "Generic (PLEG): container finished" podID="dbbacbbc-f946-45cc-abdb-0389500e5c19" containerID="2d57925db20a8b04d78c247124c30c43b01a5af3c3cb897c7a01aa2ea9616655" exitCode=0 Oct 03 08:56:26 crc kubenswrapper[4899]: I1003 08:56:26.505049 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" event={"ID":"dbbacbbc-f946-45cc-abdb-0389500e5c19","Type":"ContainerDied","Data":"2d57925db20a8b04d78c247124c30c43b01a5af3c3cb897c7a01aa2ea9616655"} Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.484762 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.520710 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ee3630d8-5170-449b-aefc-4c5443da6f9f","Type":"ContainerDied","Data":"0fbf8c8347f0e913f753be318432bf519d71b6b8cb05b5b546275e214e77a38c"} Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.520754 4899 scope.go:117] "RemoveContainer" containerID="27c946d17e54df3f5fd08b577264dac1cc573c6b0d3964ee95759afdecc0e0c3" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.520878 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.563652 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ee3630d8-5170-449b-aefc-4c5443da6f9f-httpd-run\") pod \"ee3630d8-5170-449b-aefc-4c5443da6f9f\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.563803 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee3630d8-5170-449b-aefc-4c5443da6f9f-internal-tls-certs\") pod \"ee3630d8-5170-449b-aefc-4c5443da6f9f\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.563853 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hzts7\" (UniqueName: \"kubernetes.io/projected/ee3630d8-5170-449b-aefc-4c5443da6f9f-kube-api-access-hzts7\") pod \"ee3630d8-5170-449b-aefc-4c5443da6f9f\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.563870 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee3630d8-5170-449b-aefc-4c5443da6f9f-config-data\") pod \"ee3630d8-5170-449b-aefc-4c5443da6f9f\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.563905 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee3630d8-5170-449b-aefc-4c5443da6f9f-scripts\") pod \"ee3630d8-5170-449b-aefc-4c5443da6f9f\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.564091 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee3630d8-5170-449b-aefc-4c5443da6f9f-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "ee3630d8-5170-449b-aefc-4c5443da6f9f" (UID: "ee3630d8-5170-449b-aefc-4c5443da6f9f"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.564192 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee3630d8-5170-449b-aefc-4c5443da6f9f-combined-ca-bundle\") pod \"ee3630d8-5170-449b-aefc-4c5443da6f9f\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.564245 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ee3630d8-5170-449b-aefc-4c5443da6f9f-logs\") pod \"ee3630d8-5170-449b-aefc-4c5443da6f9f\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.564299 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ee3630d8-5170-449b-aefc-4c5443da6f9f\" (UID: \"ee3630d8-5170-449b-aefc-4c5443da6f9f\") " Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.564816 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee3630d8-5170-449b-aefc-4c5443da6f9f-logs" (OuterVolumeSpecName: "logs") pod "ee3630d8-5170-449b-aefc-4c5443da6f9f" (UID: "ee3630d8-5170-449b-aefc-4c5443da6f9f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.565114 4899 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ee3630d8-5170-449b-aefc-4c5443da6f9f-logs\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.565132 4899 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ee3630d8-5170-449b-aefc-4c5443da6f9f-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.570325 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee3630d8-5170-449b-aefc-4c5443da6f9f-scripts" (OuterVolumeSpecName: "scripts") pod "ee3630d8-5170-449b-aefc-4c5443da6f9f" (UID: "ee3630d8-5170-449b-aefc-4c5443da6f9f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.571093 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "ee3630d8-5170-449b-aefc-4c5443da6f9f" (UID: "ee3630d8-5170-449b-aefc-4c5443da6f9f"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.574842 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee3630d8-5170-449b-aefc-4c5443da6f9f-kube-api-access-hzts7" (OuterVolumeSpecName: "kube-api-access-hzts7") pod "ee3630d8-5170-449b-aefc-4c5443da6f9f" (UID: "ee3630d8-5170-449b-aefc-4c5443da6f9f"). InnerVolumeSpecName "kube-api-access-hzts7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.590087 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee3630d8-5170-449b-aefc-4c5443da6f9f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ee3630d8-5170-449b-aefc-4c5443da6f9f" (UID: "ee3630d8-5170-449b-aefc-4c5443da6f9f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.626204 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee3630d8-5170-449b-aefc-4c5443da6f9f-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ee3630d8-5170-449b-aefc-4c5443da6f9f" (UID: "ee3630d8-5170-449b-aefc-4c5443da6f9f"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.648058 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee3630d8-5170-449b-aefc-4c5443da6f9f-config-data" (OuterVolumeSpecName: "config-data") pod "ee3630d8-5170-449b-aefc-4c5443da6f9f" (UID: "ee3630d8-5170-449b-aefc-4c5443da6f9f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.666551 4899 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee3630d8-5170-449b-aefc-4c5443da6f9f-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.666583 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hzts7\" (UniqueName: \"kubernetes.io/projected/ee3630d8-5170-449b-aefc-4c5443da6f9f-kube-api-access-hzts7\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.666593 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee3630d8-5170-449b-aefc-4c5443da6f9f-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.666601 4899 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee3630d8-5170-449b-aefc-4c5443da6f9f-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.666610 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee3630d8-5170-449b-aefc-4c5443da6f9f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.666635 4899 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.684227 4899 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.769786 4899 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.852945 4899 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.862692 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.893847 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 08:56:27 crc kubenswrapper[4899]: E1003 08:56:27.894706 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee3630d8-5170-449b-aefc-4c5443da6f9f" containerName="glance-log" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.894775 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee3630d8-5170-449b-aefc-4c5443da6f9f" containerName="glance-log" Oct 03 08:56:27 crc kubenswrapper[4899]: E1003 08:56:27.894838 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee3630d8-5170-449b-aefc-4c5443da6f9f" containerName="glance-httpd" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.894901 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee3630d8-5170-449b-aefc-4c5443da6f9f" containerName="glance-httpd" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.908629 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee3630d8-5170-449b-aefc-4c5443da6f9f" containerName="glance-httpd" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.908836 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee3630d8-5170-449b-aefc-4c5443da6f9f" containerName="glance-log" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.913801 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.918373 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.918456 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.924866 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.975406 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/894a907e-c584-4671-9ba8-e4b1df804b5b-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.975504 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/894a907e-c584-4671-9ba8-e4b1df804b5b-logs\") pod \"glance-default-internal-api-0\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.975637 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/894a907e-c584-4671-9ba8-e4b1df804b5b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.975668 4899 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.975715 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kf9kd\" (UniqueName: \"kubernetes.io/projected/894a907e-c584-4671-9ba8-e4b1df804b5b-kube-api-access-kf9kd\") pod \"glance-default-internal-api-0\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.975746 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/894a907e-c584-4671-9ba8-e4b1df804b5b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.975767 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/894a907e-c584-4671-9ba8-e4b1df804b5b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:27 crc kubenswrapper[4899]: I1003 08:56:27.975783 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/894a907e-c584-4671-9ba8-e4b1df804b5b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:28 crc kubenswrapper[4899]: I1003 08:56:28.080345 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/894a907e-c584-4671-9ba8-e4b1df804b5b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:28 crc kubenswrapper[4899]: I1003 08:56:28.080427 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:28 crc kubenswrapper[4899]: I1003 08:56:28.080502 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kf9kd\" (UniqueName: \"kubernetes.io/projected/894a907e-c584-4671-9ba8-e4b1df804b5b-kube-api-access-kf9kd\") pod \"glance-default-internal-api-0\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:28 crc kubenswrapper[4899]: I1003 08:56:28.080557 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/894a907e-c584-4671-9ba8-e4b1df804b5b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:28 crc kubenswrapper[4899]: I1003 08:56:28.080581 4899 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/894a907e-c584-4671-9ba8-e4b1df804b5b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:28 crc kubenswrapper[4899]: I1003 08:56:28.080595 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/894a907e-c584-4671-9ba8-e4b1df804b5b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:28 crc kubenswrapper[4899]: I1003 08:56:28.080637 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/894a907e-c584-4671-9ba8-e4b1df804b5b-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:28 crc kubenswrapper[4899]: I1003 08:56:28.080662 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/894a907e-c584-4671-9ba8-e4b1df804b5b-logs\") pod \"glance-default-internal-api-0\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:28 crc kubenswrapper[4899]: I1003 08:56:28.080760 4899 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0" Oct 03 08:56:28 crc kubenswrapper[4899]: I1003 08:56:28.081154 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/894a907e-c584-4671-9ba8-e4b1df804b5b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:28 crc kubenswrapper[4899]: I1003 08:56:28.081465 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/894a907e-c584-4671-9ba8-e4b1df804b5b-logs\") pod \"glance-default-internal-api-0\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:28 crc kubenswrapper[4899]: I1003 08:56:28.090503 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/894a907e-c584-4671-9ba8-e4b1df804b5b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:28 crc kubenswrapper[4899]: I1003 08:56:28.096641 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/894a907e-c584-4671-9ba8-e4b1df804b5b-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:28 crc kubenswrapper[4899]: I1003 08:56:28.098463 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/894a907e-c584-4671-9ba8-e4b1df804b5b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:28 crc kubenswrapper[4899]: I1003 08:56:28.099000 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/894a907e-c584-4671-9ba8-e4b1df804b5b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:28 crc kubenswrapper[4899]: I1003 08:56:28.102657 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kf9kd\" (UniqueName: \"kubernetes.io/projected/894a907e-c584-4671-9ba8-e4b1df804b5b-kube-api-access-kf9kd\") pod \"glance-default-internal-api-0\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:28 crc kubenswrapper[4899]: I1003 08:56:28.130130 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:56:28 crc kubenswrapper[4899]: I1003 08:56:28.246276 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 08:56:28 crc kubenswrapper[4899]: I1003 08:56:28.537764 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee3630d8-5170-449b-aefc-4c5443da6f9f" path="/var/lib/kubelet/pods/ee3630d8-5170-449b-aefc-4c5443da6f9f/volumes" Oct 03 08:56:29 crc kubenswrapper[4899]: E1003 08:56:29.979683 4899 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Oct 03 08:56:29 crc kubenswrapper[4899]: E1003 08:56:29.980227 4899 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db 
upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-grmzz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-jf5f9_openstack(5d0954fe-c339-493b-a2ca-5d30b54bc603): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 03 08:56:29 crc kubenswrapper[4899]: E1003 08:56:29.981398 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-jf5f9" podUID="5d0954fe-c339-493b-a2ca-5d30b54bc603" Oct 03 08:56:30 crc kubenswrapper[4899]: I1003 08:56:30.051007 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-fp7gs" Oct 03 08:56:30 crc kubenswrapper[4899]: I1003 08:56:30.131412 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-fernet-keys\") pod \"00702fc3-6e90-4171-85c6-03f71bb45256\" (UID: \"00702fc3-6e90-4171-85c6-03f71bb45256\") " Oct 03 08:56:30 crc kubenswrapper[4899]: I1003 08:56:30.131566 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-combined-ca-bundle\") pod \"00702fc3-6e90-4171-85c6-03f71bb45256\" (UID: \"00702fc3-6e90-4171-85c6-03f71bb45256\") " Oct 03 08:56:30 crc kubenswrapper[4899]: I1003 08:56:30.131680 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-scripts\") pod \"00702fc3-6e90-4171-85c6-03f71bb45256\" (UID: \"00702fc3-6e90-4171-85c6-03f71bb45256\") " Oct 03 08:56:30 crc kubenswrapper[4899]: I1003 08:56:30.131706 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2chgw\" (UniqueName: \"kubernetes.io/projected/00702fc3-6e90-4171-85c6-03f71bb45256-kube-api-access-2chgw\") pod \"00702fc3-6e90-4171-85c6-03f71bb45256\" (UID: \"00702fc3-6e90-4171-85c6-03f71bb45256\") " Oct 03 08:56:30 crc kubenswrapper[4899]: I1003 08:56:30.131730 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-config-data\") pod \"00702fc3-6e90-4171-85c6-03f71bb45256\" (UID: \"00702fc3-6e90-4171-85c6-03f71bb45256\") " Oct 03 08:56:30 crc kubenswrapper[4899]: I1003 08:56:30.131747 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-credential-keys\") pod \"00702fc3-6e90-4171-85c6-03f71bb45256\" (UID: \"00702fc3-6e90-4171-85c6-03f71bb45256\") " Oct 03 08:56:30 crc kubenswrapper[4899]: I1003 08:56:30.138579 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "00702fc3-6e90-4171-85c6-03f71bb45256" (UID: "00702fc3-6e90-4171-85c6-03f71bb45256"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:30 crc kubenswrapper[4899]: I1003 08:56:30.141260 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "00702fc3-6e90-4171-85c6-03f71bb45256" (UID: "00702fc3-6e90-4171-85c6-03f71bb45256"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:30 crc kubenswrapper[4899]: I1003 08:56:30.141349 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-scripts" (OuterVolumeSpecName: "scripts") pod "00702fc3-6e90-4171-85c6-03f71bb45256" (UID: "00702fc3-6e90-4171-85c6-03f71bb45256"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:30 crc kubenswrapper[4899]: I1003 08:56:30.141585 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00702fc3-6e90-4171-85c6-03f71bb45256-kube-api-access-2chgw" (OuterVolumeSpecName: "kube-api-access-2chgw") pod "00702fc3-6e90-4171-85c6-03f71bb45256" (UID: "00702fc3-6e90-4171-85c6-03f71bb45256"). InnerVolumeSpecName "kube-api-access-2chgw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:56:30 crc kubenswrapper[4899]: I1003 08:56:30.168336 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "00702fc3-6e90-4171-85c6-03f71bb45256" (UID: "00702fc3-6e90-4171-85c6-03f71bb45256"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:30 crc kubenswrapper[4899]: I1003 08:56:30.197615 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-config-data" (OuterVolumeSpecName: "config-data") pod "00702fc3-6e90-4171-85c6-03f71bb45256" (UID: "00702fc3-6e90-4171-85c6-03f71bb45256"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:30 crc kubenswrapper[4899]: I1003 08:56:30.234125 4899 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:30 crc kubenswrapper[4899]: I1003 08:56:30.234158 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:30 crc kubenswrapper[4899]: I1003 08:56:30.234168 4899 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:30 crc kubenswrapper[4899]: I1003 08:56:30.234176 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2chgw\" (UniqueName: \"kubernetes.io/projected/00702fc3-6e90-4171-85c6-03f71bb45256-kube-api-access-2chgw\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:30 crc kubenswrapper[4899]: I1003 08:56:30.234185 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:30 crc kubenswrapper[4899]: I1003 08:56:30.234193 4899 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/00702fc3-6e90-4171-85c6-03f71bb45256-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:30 crc kubenswrapper[4899]: I1003 08:56:30.562672 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-fp7gs" Oct 03 08:56:30 crc kubenswrapper[4899]: I1003 08:56:30.562685 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-fp7gs" event={"ID":"00702fc3-6e90-4171-85c6-03f71bb45256","Type":"ContainerDied","Data":"0ab4a7f15c37af4f84440d62001a83056f515b5cac4f206b53956cc00644e50e"} Oct 03 08:56:30 crc kubenswrapper[4899]: I1003 08:56:30.562755 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0ab4a7f15c37af4f84440d62001a83056f515b5cac4f206b53956cc00644e50e" Oct 03 08:56:30 crc kubenswrapper[4899]: E1003 08:56:30.564247 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-jf5f9" podUID="5d0954fe-c339-493b-a2ca-5d30b54bc603" Oct 03 08:56:30 crc kubenswrapper[4899]: I1003 08:56:30.931139 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" podUID="dbbacbbc-f946-45cc-abdb-0389500e5c19" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: connect: connection refused" Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.166491 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-fp7gs"] Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.174623 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-fp7gs"] Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.254221 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-5rpwq"] Oct 03 08:56:31 crc kubenswrapper[4899]: E1003 08:56:31.254587 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00702fc3-6e90-4171-85c6-03f71bb45256" containerName="keystone-bootstrap" Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.254611 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="00702fc3-6e90-4171-85c6-03f71bb45256" containerName="keystone-bootstrap" Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.254798 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="00702fc3-6e90-4171-85c6-03f71bb45256" containerName="keystone-bootstrap" Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.255370 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-5rpwq" Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.258044 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-6hp7k" Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.258324 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.258529 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.259710 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.268009 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-5rpwq"] Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.365863 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-fernet-keys\") pod \"keystone-bootstrap-5rpwq\" (UID: \"194b2a4b-c8aa-4590-a400-f54b03904ecf\") " pod="openstack/keystone-bootstrap-5rpwq" Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.365949 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-combined-ca-bundle\") pod \"keystone-bootstrap-5rpwq\" (UID: \"194b2a4b-c8aa-4590-a400-f54b03904ecf\") " pod="openstack/keystone-bootstrap-5rpwq" Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.366021 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-scripts\") pod \"keystone-bootstrap-5rpwq\" (UID: \"194b2a4b-c8aa-4590-a400-f54b03904ecf\") " pod="openstack/keystone-bootstrap-5rpwq" Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.366160 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-credential-keys\") pod \"keystone-bootstrap-5rpwq\" (UID: \"194b2a4b-c8aa-4590-a400-f54b03904ecf\") " pod="openstack/keystone-bootstrap-5rpwq" Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.366215 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2b8wv\" (UniqueName: \"kubernetes.io/projected/194b2a4b-c8aa-4590-a400-f54b03904ecf-kube-api-access-2b8wv\") pod \"keystone-bootstrap-5rpwq\" (UID: \"194b2a4b-c8aa-4590-a400-f54b03904ecf\") " pod="openstack/keystone-bootstrap-5rpwq" Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.366308 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-config-data\") pod \"keystone-bootstrap-5rpwq\" (UID: \"194b2a4b-c8aa-4590-a400-f54b03904ecf\") " pod="openstack/keystone-bootstrap-5rpwq" Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.468120 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-fernet-keys\") pod \"keystone-bootstrap-5rpwq\" (UID: 
\"194b2a4b-c8aa-4590-a400-f54b03904ecf\") " pod="openstack/keystone-bootstrap-5rpwq" Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.468191 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-combined-ca-bundle\") pod \"keystone-bootstrap-5rpwq\" (UID: \"194b2a4b-c8aa-4590-a400-f54b03904ecf\") " pod="openstack/keystone-bootstrap-5rpwq" Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.468226 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-scripts\") pod \"keystone-bootstrap-5rpwq\" (UID: \"194b2a4b-c8aa-4590-a400-f54b03904ecf\") " pod="openstack/keystone-bootstrap-5rpwq" Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.468253 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-credential-keys\") pod \"keystone-bootstrap-5rpwq\" (UID: \"194b2a4b-c8aa-4590-a400-f54b03904ecf\") " pod="openstack/keystone-bootstrap-5rpwq" Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.468275 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2b8wv\" (UniqueName: \"kubernetes.io/projected/194b2a4b-c8aa-4590-a400-f54b03904ecf-kube-api-access-2b8wv\") pod \"keystone-bootstrap-5rpwq\" (UID: \"194b2a4b-c8aa-4590-a400-f54b03904ecf\") " pod="openstack/keystone-bootstrap-5rpwq" Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.468316 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-config-data\") pod \"keystone-bootstrap-5rpwq\" (UID: \"194b2a4b-c8aa-4590-a400-f54b03904ecf\") " pod="openstack/keystone-bootstrap-5rpwq" Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.474357 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-combined-ca-bundle\") pod \"keystone-bootstrap-5rpwq\" (UID: \"194b2a4b-c8aa-4590-a400-f54b03904ecf\") " pod="openstack/keystone-bootstrap-5rpwq" Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.477455 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-scripts\") pod \"keystone-bootstrap-5rpwq\" (UID: \"194b2a4b-c8aa-4590-a400-f54b03904ecf\") " pod="openstack/keystone-bootstrap-5rpwq" Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.477713 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-credential-keys\") pod \"keystone-bootstrap-5rpwq\" (UID: \"194b2a4b-c8aa-4590-a400-f54b03904ecf\") " pod="openstack/keystone-bootstrap-5rpwq" Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.477749 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-fernet-keys\") pod \"keystone-bootstrap-5rpwq\" (UID: \"194b2a4b-c8aa-4590-a400-f54b03904ecf\") " pod="openstack/keystone-bootstrap-5rpwq" Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.477925 4899 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-config-data\") pod \"keystone-bootstrap-5rpwq\" (UID: \"194b2a4b-c8aa-4590-a400-f54b03904ecf\") " pod="openstack/keystone-bootstrap-5rpwq" Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.485326 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2b8wv\" (UniqueName: \"kubernetes.io/projected/194b2a4b-c8aa-4590-a400-f54b03904ecf-kube-api-access-2b8wv\") pod \"keystone-bootstrap-5rpwq\" (UID: \"194b2a4b-c8aa-4590-a400-f54b03904ecf\") " pod="openstack/keystone-bootstrap-5rpwq" Oct 03 08:56:31 crc kubenswrapper[4899]: I1003 08:56:31.575842 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-5rpwq" Oct 03 08:56:32 crc kubenswrapper[4899]: I1003 08:56:32.536156 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00702fc3-6e90-4171-85c6-03f71bb45256" path="/var/lib/kubelet/pods/00702fc3-6e90-4171-85c6-03f71bb45256/volumes" Oct 03 08:56:32 crc kubenswrapper[4899]: E1003 08:56:32.757262 4899 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-placement-api:current-podified" Oct 03 08:56:32 crc kubenswrapper[4899]: E1003 08:56:32.757642 4899 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8x4h9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{}
,RestartPolicy:nil,} start failed in pod placement-db-sync-69lhb_openstack(4663e6c0-d0e2-49f9-a457-8bc02fefa635): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 03 08:56:32 crc kubenswrapper[4899]: E1003 08:56:32.759165 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-69lhb" podUID="4663e6c0-d0e2-49f9-a457-8bc02fefa635" Oct 03 08:56:33 crc kubenswrapper[4899]: E1003 08:56:33.588297 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-placement-api:current-podified\\\"\"" pod="openstack/placement-db-sync-69lhb" podUID="4663e6c0-d0e2-49f9-a457-8bc02fefa635" Oct 03 08:56:35 crc kubenswrapper[4899]: I1003 08:56:35.616058 4899 generic.go:334] "Generic (PLEG): container finished" podID="c2decdc4-50c4-4370-8551-fe73d6918bcd" containerID="12ff563a9c461311dacf4abc2193e35815a9dccd835f6edfe2ea00f82baecc3a" exitCode=0 Oct 03 08:56:35 crc kubenswrapper[4899]: I1003 08:56:35.616100 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-rh6dp" event={"ID":"c2decdc4-50c4-4370-8551-fe73d6918bcd","Type":"ContainerDied","Data":"12ff563a9c461311dacf4abc2193e35815a9dccd835f6edfe2ea00f82baecc3a"} Oct 03 08:56:40 crc kubenswrapper[4899]: I1003 08:56:40.931687 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" podUID="dbbacbbc-f946-45cc-abdb-0389500e5c19" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: i/o timeout" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.175425 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.175472 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.288651 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.297449 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.306691 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-rh6dp" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.337864 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2decdc4-50c4-4370-8551-fe73d6918bcd-combined-ca-bundle\") pod \"c2decdc4-50c4-4370-8551-fe73d6918bcd\" (UID: \"c2decdc4-50c4-4370-8551-fe73d6918bcd\") " Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.337961 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dbbacbbc-f946-45cc-abdb-0389500e5c19-ovsdbserver-sb\") pod \"dbbacbbc-f946-45cc-abdb-0389500e5c19\" (UID: \"dbbacbbc-f946-45cc-abdb-0389500e5c19\") " Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.338027 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dbbacbbc-f946-45cc-abdb-0389500e5c19-ovsdbserver-nb\") pod \"dbbacbbc-f946-45cc-abdb-0389500e5c19\" (UID: \"dbbacbbc-f946-45cc-abdb-0389500e5c19\") " Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.338128 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dbbacbbc-f946-45cc-abdb-0389500e5c19-dns-svc\") pod \"dbbacbbc-f946-45cc-abdb-0389500e5c19\" (UID: \"dbbacbbc-f946-45cc-abdb-0389500e5c19\") " Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.338209 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w8gwb\" (UniqueName: \"kubernetes.io/projected/c2decdc4-50c4-4370-8551-fe73d6918bcd-kube-api-access-w8gwb\") pod \"c2decdc4-50c4-4370-8551-fe73d6918bcd\" (UID: \"c2decdc4-50c4-4370-8551-fe73d6918bcd\") " Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.338242 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/953831ef-1753-4bd9-896d-a769b212b0ae-logs\") pod \"953831ef-1753-4bd9-896d-a769b212b0ae\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.338306 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b7mp5\" (UniqueName: \"kubernetes.io/projected/953831ef-1753-4bd9-896d-a769b212b0ae-kube-api-access-b7mp5\") pod \"953831ef-1753-4bd9-896d-a769b212b0ae\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.338393 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/953831ef-1753-4bd9-896d-a769b212b0ae-scripts\") pod \"953831ef-1753-4bd9-896d-a769b212b0ae\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.338474 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ckt5\" (UniqueName: \"kubernetes.io/projected/dbbacbbc-f946-45cc-abdb-0389500e5c19-kube-api-access-2ckt5\") pod \"dbbacbbc-f946-45cc-abdb-0389500e5c19\" (UID: \"dbbacbbc-f946-45cc-abdb-0389500e5c19\") " Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.338510 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/953831ef-1753-4bd9-896d-a769b212b0ae-public-tls-certs\") pod \"953831ef-1753-4bd9-896d-a769b212b0ae\" 
(UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.338572 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/953831ef-1753-4bd9-896d-a769b212b0ae-config-data\") pod \"953831ef-1753-4bd9-896d-a769b212b0ae\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.338646 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/953831ef-1753-4bd9-896d-a769b212b0ae-combined-ca-bundle\") pod \"953831ef-1753-4bd9-896d-a769b212b0ae\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.338721 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/953831ef-1753-4bd9-896d-a769b212b0ae-httpd-run\") pod \"953831ef-1753-4bd9-896d-a769b212b0ae\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.338748 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"953831ef-1753-4bd9-896d-a769b212b0ae\" (UID: \"953831ef-1753-4bd9-896d-a769b212b0ae\") " Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.338817 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbbacbbc-f946-45cc-abdb-0389500e5c19-config\") pod \"dbbacbbc-f946-45cc-abdb-0389500e5c19\" (UID: \"dbbacbbc-f946-45cc-abdb-0389500e5c19\") " Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.339306 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c2decdc4-50c4-4370-8551-fe73d6918bcd-config\") pod \"c2decdc4-50c4-4370-8551-fe73d6918bcd\" (UID: \"c2decdc4-50c4-4370-8551-fe73d6918bcd\") " Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.339959 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/953831ef-1753-4bd9-896d-a769b212b0ae-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "953831ef-1753-4bd9-896d-a769b212b0ae" (UID: "953831ef-1753-4bd9-896d-a769b212b0ae"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.340194 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/953831ef-1753-4bd9-896d-a769b212b0ae-logs" (OuterVolumeSpecName: "logs") pod "953831ef-1753-4bd9-896d-a769b212b0ae" (UID: "953831ef-1753-4bd9-896d-a769b212b0ae"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.347389 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2decdc4-50c4-4370-8551-fe73d6918bcd-kube-api-access-w8gwb" (OuterVolumeSpecName: "kube-api-access-w8gwb") pod "c2decdc4-50c4-4370-8551-fe73d6918bcd" (UID: "c2decdc4-50c4-4370-8551-fe73d6918bcd"). InnerVolumeSpecName "kube-api-access-w8gwb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.349498 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "953831ef-1753-4bd9-896d-a769b212b0ae" (UID: "953831ef-1753-4bd9-896d-a769b212b0ae"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.351115 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/953831ef-1753-4bd9-896d-a769b212b0ae-scripts" (OuterVolumeSpecName: "scripts") pod "953831ef-1753-4bd9-896d-a769b212b0ae" (UID: "953831ef-1753-4bd9-896d-a769b212b0ae"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.355716 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/953831ef-1753-4bd9-896d-a769b212b0ae-kube-api-access-b7mp5" (OuterVolumeSpecName: "kube-api-access-b7mp5") pod "953831ef-1753-4bd9-896d-a769b212b0ae" (UID: "953831ef-1753-4bd9-896d-a769b212b0ae"). InnerVolumeSpecName "kube-api-access-b7mp5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.369580 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbbacbbc-f946-45cc-abdb-0389500e5c19-kube-api-access-2ckt5" (OuterVolumeSpecName: "kube-api-access-2ckt5") pod "dbbacbbc-f946-45cc-abdb-0389500e5c19" (UID: "dbbacbbc-f946-45cc-abdb-0389500e5c19"). InnerVolumeSpecName "kube-api-access-2ckt5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.389197 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2decdc4-50c4-4370-8551-fe73d6918bcd-config" (OuterVolumeSpecName: "config") pod "c2decdc4-50c4-4370-8551-fe73d6918bcd" (UID: "c2decdc4-50c4-4370-8551-fe73d6918bcd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.401675 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2decdc4-50c4-4370-8551-fe73d6918bcd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c2decdc4-50c4-4370-8551-fe73d6918bcd" (UID: "c2decdc4-50c4-4370-8551-fe73d6918bcd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.424226 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dbbacbbc-f946-45cc-abdb-0389500e5c19-config" (OuterVolumeSpecName: "config") pod "dbbacbbc-f946-45cc-abdb-0389500e5c19" (UID: "dbbacbbc-f946-45cc-abdb-0389500e5c19"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.427957 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/953831ef-1753-4bd9-896d-a769b212b0ae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "953831ef-1753-4bd9-896d-a769b212b0ae" (UID: "953831ef-1753-4bd9-896d-a769b212b0ae"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.430231 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dbbacbbc-f946-45cc-abdb-0389500e5c19-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "dbbacbbc-f946-45cc-abdb-0389500e5c19" (UID: "dbbacbbc-f946-45cc-abdb-0389500e5c19"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.431639 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/953831ef-1753-4bd9-896d-a769b212b0ae-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "953831ef-1753-4bd9-896d-a769b212b0ae" (UID: "953831ef-1753-4bd9-896d-a769b212b0ae"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.441858 4899 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dbbacbbc-f946-45cc-abdb-0389500e5c19-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.441916 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w8gwb\" (UniqueName: \"kubernetes.io/projected/c2decdc4-50c4-4370-8551-fe73d6918bcd-kube-api-access-w8gwb\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.441931 4899 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/953831ef-1753-4bd9-896d-a769b212b0ae-logs\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.441943 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b7mp5\" (UniqueName: \"kubernetes.io/projected/953831ef-1753-4bd9-896d-a769b212b0ae-kube-api-access-b7mp5\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.441953 4899 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/953831ef-1753-4bd9-896d-a769b212b0ae-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.441963 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ckt5\" (UniqueName: \"kubernetes.io/projected/dbbacbbc-f946-45cc-abdb-0389500e5c19-kube-api-access-2ckt5\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.441973 4899 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/953831ef-1753-4bd9-896d-a769b212b0ae-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.442070 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/953831ef-1753-4bd9-896d-a769b212b0ae-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.442109 4899 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/953831ef-1753-4bd9-896d-a769b212b0ae-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.442142 4899 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on 
node \"crc\" " Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.442154 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbbacbbc-f946-45cc-abdb-0389500e5c19-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.442193 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/c2decdc4-50c4-4370-8551-fe73d6918bcd-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.442205 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2decdc4-50c4-4370-8551-fe73d6918bcd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.444467 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dbbacbbc-f946-45cc-abdb-0389500e5c19-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "dbbacbbc-f946-45cc-abdb-0389500e5c19" (UID: "dbbacbbc-f946-45cc-abdb-0389500e5c19"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.454562 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/953831ef-1753-4bd9-896d-a769b212b0ae-config-data" (OuterVolumeSpecName: "config-data") pod "953831ef-1753-4bd9-896d-a769b212b0ae" (UID: "953831ef-1753-4bd9-896d-a769b212b0ae"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.463493 4899 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.467651 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dbbacbbc-f946-45cc-abdb-0389500e5c19-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "dbbacbbc-f946-45cc-abdb-0389500e5c19" (UID: "dbbacbbc-f946-45cc-abdb-0389500e5c19"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.543796 4899 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dbbacbbc-f946-45cc-abdb-0389500e5c19-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.543832 4899 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dbbacbbc-f946-45cc-abdb-0389500e5c19-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.543842 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/953831ef-1753-4bd9-896d-a769b212b0ae-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.543851 4899 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.673829 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.674387 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" event={"ID":"dbbacbbc-f946-45cc-abdb-0389500e5c19","Type":"ContainerDied","Data":"e357b5cc4b256c5db51aeb80f47d686e0a3fca5462cadeef72438054a958a944"} Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.686615 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"953831ef-1753-4bd9-896d-a769b212b0ae","Type":"ContainerDied","Data":"c93993bf71c5ad6f03f515f4aefbb3853f156a7607a2e6c1a5d4fc9f621aea37"} Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.686755 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.690228 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-rh6dp" event={"ID":"c2decdc4-50c4-4370-8551-fe73d6918bcd","Type":"ContainerDied","Data":"6b2f90774db6fb4d91595965f4c892211148e0c367c4cbfdd9a317b88e9fbfb5"} Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.690274 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b2f90774db6fb4d91595965f4c892211148e0c367c4cbfdd9a317b88e9fbfb5" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.690253 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-rh6dp" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.711098 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-nj5rb"] Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.717534 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-nj5rb"] Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.729748 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.742957 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.759476 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 08:56:41 crc kubenswrapper[4899]: E1003 08:56:41.760067 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbbacbbc-f946-45cc-abdb-0389500e5c19" containerName="init" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.760091 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbbacbbc-f946-45cc-abdb-0389500e5c19" containerName="init" Oct 03 08:56:41 crc kubenswrapper[4899]: E1003 08:56:41.760597 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2decdc4-50c4-4370-8551-fe73d6918bcd" containerName="neutron-db-sync" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.760614 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2decdc4-50c4-4370-8551-fe73d6918bcd" containerName="neutron-db-sync" Oct 03 08:56:41 crc kubenswrapper[4899]: E1003 08:56:41.760631 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="953831ef-1753-4bd9-896d-a769b212b0ae" containerName="glance-log" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.760638 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="953831ef-1753-4bd9-896d-a769b212b0ae" 
containerName="glance-log" Oct 03 08:56:41 crc kubenswrapper[4899]: E1003 08:56:41.760656 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbbacbbc-f946-45cc-abdb-0389500e5c19" containerName="dnsmasq-dns" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.760663 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbbacbbc-f946-45cc-abdb-0389500e5c19" containerName="dnsmasq-dns" Oct 03 08:56:41 crc kubenswrapper[4899]: E1003 08:56:41.760676 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="953831ef-1753-4bd9-896d-a769b212b0ae" containerName="glance-httpd" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.760682 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="953831ef-1753-4bd9-896d-a769b212b0ae" containerName="glance-httpd" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.760924 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="953831ef-1753-4bd9-896d-a769b212b0ae" containerName="glance-httpd" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.760941 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="953831ef-1753-4bd9-896d-a769b212b0ae" containerName="glance-log" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.760963 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbbacbbc-f946-45cc-abdb-0389500e5c19" containerName="dnsmasq-dns" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.760976 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2decdc4-50c4-4370-8551-fe73d6918bcd" containerName="neutron-db-sync" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.762398 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.765298 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.765691 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.774307 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.851752 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-config-data\") pod \"glance-default-external-api-0\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.852161 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.852297 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc 
kubenswrapper[4899]: I1003 08:56:41.852393 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.852484 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.852590 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-logs\") pod \"glance-default-external-api-0\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.852666 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbv7d\" (UniqueName: \"kubernetes.io/projected/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-kube-api-access-kbv7d\") pod \"glance-default-external-api-0\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.852793 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-scripts\") pod \"glance-default-external-api-0\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.954439 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-logs\") pod \"glance-default-external-api-0\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.954555 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbv7d\" (UniqueName: \"kubernetes.io/projected/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-kube-api-access-kbv7d\") pod \"glance-default-external-api-0\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.954846 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-scripts\") pod \"glance-default-external-api-0\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.954842 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-logs\") pod \"glance-default-external-api-0\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 
08:56:41.954983 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-config-data\") pod \"glance-default-external-api-0\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.955032 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.955077 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.955119 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.955197 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.955527 4899 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.956196 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.960230 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-scripts\") pod \"glance-default-external-api-0\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.960476 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-config-data\") pod \"glance-default-external-api-0\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.961580 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.963677 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.970543 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbv7d\" (UniqueName: \"kubernetes.io/projected/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-kube-api-access-kbv7d\") pod \"glance-default-external-api-0\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:41 crc kubenswrapper[4899]: I1003 08:56:41.980011 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " pod="openstack/glance-default-external-api-0" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.096467 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.549120 4899 scope.go:117] "RemoveContainer" containerID="02a1cc6f41530715dbbd649b9906bbb19a5a1d299f5cf9afbbd38020628f5d5c" Oct 03 08:56:42 crc kubenswrapper[4899]: E1003 08:56:42.549584 4899 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Oct 03 08:56:42 crc kubenswrapper[4899]: E1003 08:56:42.549724 4899 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cf5vp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-q9f5f_openstack(5545cd7a-7849-48e5-91f3-6a3a8d51e665): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 03 08:56:42 crc kubenswrapper[4899]: E1003 08:56:42.553514 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-q9f5f" podUID="5545cd7a-7849-48e5-91f3-6a3a8d51e665" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.558086 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="953831ef-1753-4bd9-896d-a769b212b0ae" path="/var/lib/kubelet/pods/953831ef-1753-4bd9-896d-a769b212b0ae/volumes" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.571927 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dbbacbbc-f946-45cc-abdb-0389500e5c19" path="/var/lib/kubelet/pods/dbbacbbc-f946-45cc-abdb-0389500e5c19/volumes" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.577490 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-c8f7n"] Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.578935 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.619666 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-c8f7n"] Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.667049 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5dc9cdc98b-cgxhj"] Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.669273 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7b667979-c8f7n\" (UID: \"a50d8934-3720-4f4e-a702-bf8b43090f52\") " pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.669574 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7b667979-c8f7n\" (UID: \"a50d8934-3720-4f4e-a702-bf8b43090f52\") " pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.669627 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5dc9cdc98b-cgxhj" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.669729 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rkl5x\" (UniqueName: \"kubernetes.io/projected/a50d8934-3720-4f4e-a702-bf8b43090f52-kube-api-access-rkl5x\") pod \"dnsmasq-dns-6b7b667979-c8f7n\" (UID: \"a50d8934-3720-4f4e-a702-bf8b43090f52\") " pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.670002 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7b667979-c8f7n\" (UID: \"a50d8934-3720-4f4e-a702-bf8b43090f52\") " pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.670042 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-dns-svc\") pod \"dnsmasq-dns-6b7b667979-c8f7n\" (UID: \"a50d8934-3720-4f4e-a702-bf8b43090f52\") " pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.670206 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-config\") pod \"dnsmasq-dns-6b7b667979-c8f7n\" (UID: \"a50d8934-3720-4f4e-a702-bf8b43090f52\") " pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.675496 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.676208 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.676993 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 
08:56:42.677165 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-4flr9" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.683106 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5dc9cdc98b-cgxhj"] Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.688991 4899 scope.go:117] "RemoveContainer" containerID="2d57925db20a8b04d78c247124c30c43b01a5af3c3cb897c7a01aa2ea9616655" Oct 03 08:56:42 crc kubenswrapper[4899]: E1003 08:56:42.736488 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-q9f5f" podUID="5545cd7a-7849-48e5-91f3-6a3a8d51e665" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.772741 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-httpd-config\") pod \"neutron-5dc9cdc98b-cgxhj\" (UID: \"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73\") " pod="openstack/neutron-5dc9cdc98b-cgxhj" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.772789 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-combined-ca-bundle\") pod \"neutron-5dc9cdc98b-cgxhj\" (UID: \"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73\") " pod="openstack/neutron-5dc9cdc98b-cgxhj" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.772868 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7b667979-c8f7n\" (UID: \"a50d8934-3720-4f4e-a702-bf8b43090f52\") " pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.772943 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-dns-svc\") pod \"dnsmasq-dns-6b7b667979-c8f7n\" (UID: \"a50d8934-3720-4f4e-a702-bf8b43090f52\") " pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.773764 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-config\") pod \"dnsmasq-dns-6b7b667979-c8f7n\" (UID: \"a50d8934-3720-4f4e-a702-bf8b43090f52\") " pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.773990 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7b667979-c8f7n\" (UID: \"a50d8934-3720-4f4e-a702-bf8b43090f52\") " pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.774020 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-ovndb-tls-certs\") pod \"neutron-5dc9cdc98b-cgxhj\" (UID: \"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73\") " 
pod="openstack/neutron-5dc9cdc98b-cgxhj" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.774064 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-dns-svc\") pod \"dnsmasq-dns-6b7b667979-c8f7n\" (UID: \"a50d8934-3720-4f4e-a702-bf8b43090f52\") " pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.774077 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6v9k\" (UniqueName: \"kubernetes.io/projected/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-kube-api-access-v6v9k\") pod \"neutron-5dc9cdc98b-cgxhj\" (UID: \"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73\") " pod="openstack/neutron-5dc9cdc98b-cgxhj" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.774183 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7b667979-c8f7n\" (UID: \"a50d8934-3720-4f4e-a702-bf8b43090f52\") " pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.774236 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rkl5x\" (UniqueName: \"kubernetes.io/projected/a50d8934-3720-4f4e-a702-bf8b43090f52-kube-api-access-rkl5x\") pod \"dnsmasq-dns-6b7b667979-c8f7n\" (UID: \"a50d8934-3720-4f4e-a702-bf8b43090f52\") " pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.774308 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-config\") pod \"neutron-5dc9cdc98b-cgxhj\" (UID: \"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73\") " pod="openstack/neutron-5dc9cdc98b-cgxhj" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.774902 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7b667979-c8f7n\" (UID: \"a50d8934-3720-4f4e-a702-bf8b43090f52\") " pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.776980 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7b667979-c8f7n\" (UID: \"a50d8934-3720-4f4e-a702-bf8b43090f52\") " pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.778229 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-config\") pod \"dnsmasq-dns-6b7b667979-c8f7n\" (UID: \"a50d8934-3720-4f4e-a702-bf8b43090f52\") " pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.778535 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7b667979-c8f7n\" (UID: \"a50d8934-3720-4f4e-a702-bf8b43090f52\") " pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" Oct 03 08:56:42 crc kubenswrapper[4899]: 
I1003 08:56:42.799051 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rkl5x\" (UniqueName: \"kubernetes.io/projected/a50d8934-3720-4f4e-a702-bf8b43090f52-kube-api-access-rkl5x\") pod \"dnsmasq-dns-6b7b667979-c8f7n\" (UID: \"a50d8934-3720-4f4e-a702-bf8b43090f52\") " pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.860156 4899 scope.go:117] "RemoveContainer" containerID="fb3e84e8ee193f58786e8f6657b3fdc52745c71de5fd1fd1ba04f87c9945f199" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.877067 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-config\") pod \"neutron-5dc9cdc98b-cgxhj\" (UID: \"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73\") " pod="openstack/neutron-5dc9cdc98b-cgxhj" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.877147 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-httpd-config\") pod \"neutron-5dc9cdc98b-cgxhj\" (UID: \"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73\") " pod="openstack/neutron-5dc9cdc98b-cgxhj" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.877174 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-combined-ca-bundle\") pod \"neutron-5dc9cdc98b-cgxhj\" (UID: \"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73\") " pod="openstack/neutron-5dc9cdc98b-cgxhj" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.877548 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-ovndb-tls-certs\") pod \"neutron-5dc9cdc98b-cgxhj\" (UID: \"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73\") " pod="openstack/neutron-5dc9cdc98b-cgxhj" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.877579 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6v9k\" (UniqueName: \"kubernetes.io/projected/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-kube-api-access-v6v9k\") pod \"neutron-5dc9cdc98b-cgxhj\" (UID: \"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73\") " pod="openstack/neutron-5dc9cdc98b-cgxhj" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.883197 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-config\") pod \"neutron-5dc9cdc98b-cgxhj\" (UID: \"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73\") " pod="openstack/neutron-5dc9cdc98b-cgxhj" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.886191 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-combined-ca-bundle\") pod \"neutron-5dc9cdc98b-cgxhj\" (UID: \"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73\") " pod="openstack/neutron-5dc9cdc98b-cgxhj" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.886247 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-httpd-config\") pod \"neutron-5dc9cdc98b-cgxhj\" (UID: \"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73\") " pod="openstack/neutron-5dc9cdc98b-cgxhj" Oct 03 08:56:42 crc 
kubenswrapper[4899]: I1003 08:56:42.894555 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-ovndb-tls-certs\") pod \"neutron-5dc9cdc98b-cgxhj\" (UID: \"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73\") " pod="openstack/neutron-5dc9cdc98b-cgxhj" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.901354 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6v9k\" (UniqueName: \"kubernetes.io/projected/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-kube-api-access-v6v9k\") pod \"neutron-5dc9cdc98b-cgxhj\" (UID: \"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73\") " pod="openstack/neutron-5dc9cdc98b-cgxhj" Oct 03 08:56:42 crc kubenswrapper[4899]: I1003 08:56:42.956567 4899 scope.go:117] "RemoveContainer" containerID="40d11dbfd51809b83c8e32413f91fa2902f3ecfe50c3404bdbbd259bb15e52ef" Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.018296 4899 scope.go:117] "RemoveContainer" containerID="771387d4685d19b31d35bec31c5a63dfb6139e8817b7586b1a9c8af6e7f016be" Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.046734 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.062364 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5dc9cdc98b-cgxhj" Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.222270 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-5rpwq"] Oct 03 08:56:43 crc kubenswrapper[4899]: W1003 08:56:43.244025 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod194b2a4b_c8aa_4590_a400_f54b03904ecf.slice/crio-2198f838d3515e5768ce0dfb700de25ee14da315ddabd1974295d25d7b6ed79a WatchSource:0}: Error finding container 2198f838d3515e5768ce0dfb700de25ee14da315ddabd1974295d25d7b6ed79a: Status 404 returned error can't find the container with id 2198f838d3515e5768ce0dfb700de25ee14da315ddabd1974295d25d7b6ed79a Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.379600 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5bcb4b4796-x4jmr"] Oct 03 08:56:43 crc kubenswrapper[4899]: W1003 08:56:43.398599 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2eba2a41_6a16_4f77_a699_157fb6fa7b3f.slice/crio-b5d89b934bc4a692179d707b8a18e73008072ebcc53eaff4c9a58e3f77622507 WatchSource:0}: Error finding container b5d89b934bc4a692179d707b8a18e73008072ebcc53eaff4c9a58e3f77622507: Status 404 returned error can't find the container with id b5d89b934bc4a692179d707b8a18e73008072ebcc53eaff4c9a58e3f77622507 Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.404522 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7f5ccd89b4-5dfm2"] Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.463088 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.733232 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-c8f7n"] Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.734406 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"894a907e-c584-4671-9ba8-e4b1df804b5b","Type":"ContainerStarted","Data":"df155f8e43821a3d0b1747326566060cda5fd8fe0638167c40ac98ad9358bfdd"} Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.737668 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5bcb4b4796-x4jmr" event={"ID":"2eba2a41-6a16-4f77-a699-157fb6fa7b3f","Type":"ContainerStarted","Data":"b5d89b934bc4a692179d707b8a18e73008072ebcc53eaff4c9a58e3f77622507"} Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.739528 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jf5f9" event={"ID":"5d0954fe-c339-493b-a2ca-5d30b54bc603","Type":"ContainerStarted","Data":"4e06dd4031a4a05ffb46e1fd96867356a33a50e1fea760358ba333e12f578a99"} Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.763848 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-jf5f9" podStartSLOduration=5.771563378 podStartE2EDuration="33.763826492s" podCreationTimestamp="2025-10-03 08:56:10 +0000 UTC" firstStartedPulling="2025-10-03 08:56:14.918030913 +0000 UTC m=+949.025515866" lastFinishedPulling="2025-10-03 08:56:42.910294027 +0000 UTC m=+977.017778980" observedRunningTime="2025-10-03 08:56:43.759733373 +0000 UTC m=+977.867218326" watchObservedRunningTime="2025-10-03 08:56:43.763826492 +0000 UTC m=+977.871311455" Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.767481 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8dc2bbb6-802d-4713-935a-353136d48619","Type":"ContainerStarted","Data":"b2f84a546cd6d33241b3c7fc75baa9b04e3971c497f03dfb13cc00a32d93778d"} Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.775759 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7f5ccd89b4-5dfm2" event={"ID":"bf908711-a33e-40be-b5a0-c82254721d41","Type":"ContainerStarted","Data":"1a7a92c4a6a40aadab1be9b8ca002fed58272f58aad0959ed73323057d6c4f01"} Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.782367 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-58b885464f-hm46m" podUID="c45fd4cf-daa8-4226-bb75-c55604b5ccb6" containerName="horizon-log" containerID="cri-o://e3df63268a3c07392e47cc8dfd3b47152d0e4dda655ace80d26f905a7d356c67" gracePeriod=30 Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.782467 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-58b885464f-hm46m" event={"ID":"c45fd4cf-daa8-4226-bb75-c55604b5ccb6","Type":"ContainerStarted","Data":"771eb46f0fe70dd3f1edce1943952deb9b0d001bdb141f2bf295b3bc3fdafc95"} Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.782480 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-58b885464f-hm46m" podUID="c45fd4cf-daa8-4226-bb75-c55604b5ccb6" containerName="horizon" containerID="cri-o://771eb46f0fe70dd3f1edce1943952deb9b0d001bdb141f2bf295b3bc3fdafc95" gracePeriod=30 Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.782533 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-58b885464f-hm46m" event={"ID":"c45fd4cf-daa8-4226-bb75-c55604b5ccb6","Type":"ContainerStarted","Data":"e3df63268a3c07392e47cc8dfd3b47152d0e4dda655ace80d26f905a7d356c67"} Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.804566 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5874b7f477-6z4bj" 
event={"ID":"3dd81aae-6922-43f5-83a6-c81a06bfedea","Type":"ContainerStarted","Data":"cd54378665cb0a1b706025fd9f0974ad505e52fc1e586b486c5ca7938f97af55"} Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.804987 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5874b7f477-6z4bj" podUID="3dd81aae-6922-43f5-83a6-c81a06bfedea" containerName="horizon-log" containerID="cri-o://cd54378665cb0a1b706025fd9f0974ad505e52fc1e586b486c5ca7938f97af55" gracePeriod=30 Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.805542 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5874b7f477-6z4bj" podUID="3dd81aae-6922-43f5-83a6-c81a06bfedea" containerName="horizon" containerID="cri-o://b0ac4e0b7f50ac475ffc9d876635758dbf019e7300011f0b1340590c4b5a488a" gracePeriod=30 Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.809681 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-78dbfb6845-276zt" event={"ID":"190b54d9-49f3-4d1a-aac5-ab680fafa605","Type":"ContainerStarted","Data":"a8817e1b1d2b80fb410e861131738b722f9468770e8f28f2acbbee904de13e81"} Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.809952 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-78dbfb6845-276zt" podUID="190b54d9-49f3-4d1a-aac5-ab680fafa605" containerName="horizon-log" containerID="cri-o://a8817e1b1d2b80fb410e861131738b722f9468770e8f28f2acbbee904de13e81" gracePeriod=30 Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.810077 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-78dbfb6845-276zt" podUID="190b54d9-49f3-4d1a-aac5-ab680fafa605" containerName="horizon" containerID="cri-o://a2d011c3ab331136c75d43fec472b9a75f8908d45d50156eb62098d8cf45d831" gracePeriod=30 Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.816093 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5rpwq" event={"ID":"194b2a4b-c8aa-4590-a400-f54b03904ecf","Type":"ContainerStarted","Data":"2198f838d3515e5768ce0dfb700de25ee14da315ddabd1974295d25d7b6ed79a"} Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.851992 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5dc9cdc98b-cgxhj"] Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.882346 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-58b885464f-hm46m" podStartSLOduration=2.808530962 podStartE2EDuration="33.882321679s" podCreationTimestamp="2025-10-03 08:56:10 +0000 UTC" firstStartedPulling="2025-10-03 08:56:11.532857685 +0000 UTC m=+945.640342638" lastFinishedPulling="2025-10-03 08:56:42.606648402 +0000 UTC m=+976.714133355" observedRunningTime="2025-10-03 08:56:43.834664176 +0000 UTC m=+977.942149129" watchObservedRunningTime="2025-10-03 08:56:43.882321679 +0000 UTC m=+977.989806632" Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.895771 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-78dbfb6845-276zt" podStartSLOduration=7.551179611 podStartE2EDuration="33.895751192s" podCreationTimestamp="2025-10-03 08:56:10 +0000 UTC" firstStartedPulling="2025-10-03 08:56:14.835226586 +0000 UTC m=+948.942711539" lastFinishedPulling="2025-10-03 08:56:41.179798167 +0000 UTC m=+975.287283120" observedRunningTime="2025-10-03 08:56:43.854348297 +0000 UTC m=+977.961833260" watchObservedRunningTime="2025-10-03 
08:56:43.895751192 +0000 UTC m=+978.003236145" Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.913968 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-5874b7f477-6z4bj" podStartSLOduration=3.288840939 podStartE2EDuration="30.913946096s" podCreationTimestamp="2025-10-03 08:56:13 +0000 UTC" firstStartedPulling="2025-10-03 08:56:14.981530474 +0000 UTC m=+949.089015427" lastFinishedPulling="2025-10-03 08:56:42.606635631 +0000 UTC m=+976.714120584" observedRunningTime="2025-10-03 08:56:43.885505779 +0000 UTC m=+977.992990752" watchObservedRunningTime="2025-10-03 08:56:43.913946096 +0000 UTC m=+978.021431049" Oct 03 08:56:43 crc kubenswrapper[4899]: I1003 08:56:43.921417 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-5rpwq" podStartSLOduration=12.921392961 podStartE2EDuration="12.921392961s" podCreationTimestamp="2025-10-03 08:56:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:56:43.904389055 +0000 UTC m=+978.011874008" watchObservedRunningTime="2025-10-03 08:56:43.921392961 +0000 UTC m=+978.028877914" Oct 03 08:56:44 crc kubenswrapper[4899]: I1003 08:56:44.227948 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 08:56:44 crc kubenswrapper[4899]: I1003 08:56:44.247490 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5874b7f477-6z4bj" Oct 03 08:56:44 crc kubenswrapper[4899]: I1003 08:56:44.850470 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5bcb4b4796-x4jmr" event={"ID":"2eba2a41-6a16-4f77-a699-157fb6fa7b3f","Type":"ContainerStarted","Data":"2af927b92552cb7d46e1ad7165e03c49307e20b3511b616a846aab7a5ac52533"} Oct 03 08:56:44 crc kubenswrapper[4899]: I1003 08:56:44.850805 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5bcb4b4796-x4jmr" event={"ID":"2eba2a41-6a16-4f77-a699-157fb6fa7b3f","Type":"ContainerStarted","Data":"8c3b5ed2a58fe798c1bcc02dbfbe942f01e86c53e3fd06877aa330e11f9e69c3"} Oct 03 08:56:44 crc kubenswrapper[4899]: I1003 08:56:44.869145 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"405fb02a-9786-4dc3-a4a6-bf885f7a75fe","Type":"ContainerStarted","Data":"1baf933e6a1d312c404f2c0b7cdc150fe7becdbdcd9aa0e9e0b8c0cdae9da205"} Oct 03 08:56:44 crc kubenswrapper[4899]: I1003 08:56:44.917265 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7f5ccd89b4-5dfm2" event={"ID":"bf908711-a33e-40be-b5a0-c82254721d41","Type":"ContainerStarted","Data":"e4a0e6ba517ad84431c1cf99a83d8e4f5a94611bd40924983c878a144f12a39b"} Oct 03 08:56:44 crc kubenswrapper[4899]: I1003 08:56:44.917324 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7f5ccd89b4-5dfm2" event={"ID":"bf908711-a33e-40be-b5a0-c82254721d41","Type":"ContainerStarted","Data":"9c85d08f80dab86c1783a1736d0569736626c5fec07e54e53fbb57e096efbdbc"} Oct 03 08:56:44 crc kubenswrapper[4899]: I1003 08:56:44.939138 4899 generic.go:334] "Generic (PLEG): container finished" podID="a50d8934-3720-4f4e-a702-bf8b43090f52" containerID="eb0d94f717510d452a2595c8ed1663ddf08621a452a626a36ff79fb72cfe9833" exitCode=0 Oct 03 08:56:44 crc kubenswrapper[4899]: I1003 08:56:44.939340 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" 
event={"ID":"a50d8934-3720-4f4e-a702-bf8b43090f52","Type":"ContainerDied","Data":"eb0d94f717510d452a2595c8ed1663ddf08621a452a626a36ff79fb72cfe9833"} Oct 03 08:56:44 crc kubenswrapper[4899]: I1003 08:56:44.939435 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" event={"ID":"a50d8934-3720-4f4e-a702-bf8b43090f52","Type":"ContainerStarted","Data":"f20b007ce9317ff3e33743597f1d13e3233d73b81d03f2a7e1bd117e74078e46"} Oct 03 08:56:44 crc kubenswrapper[4899]: I1003 08:56:44.948437 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-5bcb4b4796-x4jmr" podStartSLOduration=24.948418967 podStartE2EDuration="24.948418967s" podCreationTimestamp="2025-10-03 08:56:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:56:44.882913682 +0000 UTC m=+978.990398635" watchObservedRunningTime="2025-10-03 08:56:44.948418967 +0000 UTC m=+979.055903920" Oct 03 08:56:44 crc kubenswrapper[4899]: I1003 08:56:44.956473 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7f5ccd89b4-5dfm2" podStartSLOduration=24.956458221 podStartE2EDuration="24.956458221s" podCreationTimestamp="2025-10-03 08:56:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:56:44.942870572 +0000 UTC m=+979.050355525" watchObservedRunningTime="2025-10-03 08:56:44.956458221 +0000 UTC m=+979.063943174" Oct 03 08:56:44 crc kubenswrapper[4899]: I1003 08:56:44.969334 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-cdb85d7df-9hdqn"] Oct 03 08:56:44 crc kubenswrapper[4899]: I1003 08:56:44.970812 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-cdb85d7df-9hdqn" Oct 03 08:56:44 crc kubenswrapper[4899]: I1003 08:56:44.977304 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Oct 03 08:56:44 crc kubenswrapper[4899]: I1003 08:56:44.977521 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Oct 03 08:56:44 crc kubenswrapper[4899]: I1003 08:56:44.982116 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5874b7f477-6z4bj" event={"ID":"3dd81aae-6922-43f5-83a6-c81a06bfedea","Type":"ContainerStarted","Data":"b0ac4e0b7f50ac475ffc9d876635758dbf019e7300011f0b1340590c4b5a488a"} Oct 03 08:56:44 crc kubenswrapper[4899]: I1003 08:56:44.984353 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-78dbfb6845-276zt" event={"ID":"190b54d9-49f3-4d1a-aac5-ab680fafa605","Type":"ContainerStarted","Data":"a2d011c3ab331136c75d43fec472b9a75f8908d45d50156eb62098d8cf45d831"} Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.004912 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5rpwq" event={"ID":"194b2a4b-c8aa-4590-a400-f54b03904ecf","Type":"ContainerStarted","Data":"0884fff84a52ad2b22541251c235b6dd4fd12d11c778d61d51d5c2e38bf2da94"} Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.005251 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5dc9cdc98b-cgxhj" event={"ID":"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73","Type":"ContainerStarted","Data":"bbf2fd19a32c7a8f0bf403d2e7a0db3a12683c31ebf1d37d2430fdb7bf52e8ea"} Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.005266 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5dc9cdc98b-cgxhj" event={"ID":"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73","Type":"ContainerStarted","Data":"fbba50d7ae43014968b668332d8762974ceb1eeeb294cf2e7a6ea91b83979406"} Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.008458 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5dc9cdc98b-cgxhj" Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.010920 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-cdb85d7df-9hdqn"] Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.037483 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e425bd0-71ee-4b86-a246-e31d103a8745-ovndb-tls-certs\") pod \"neutron-cdb85d7df-9hdqn\" (UID: \"3e425bd0-71ee-4b86-a246-e31d103a8745\") " pod="openstack/neutron-cdb85d7df-9hdqn" Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.037576 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e425bd0-71ee-4b86-a246-e31d103a8745-internal-tls-certs\") pod \"neutron-cdb85d7df-9hdqn\" (UID: \"3e425bd0-71ee-4b86-a246-e31d103a8745\") " pod="openstack/neutron-cdb85d7df-9hdqn" Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.037606 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3e425bd0-71ee-4b86-a246-e31d103a8745-httpd-config\") pod \"neutron-cdb85d7df-9hdqn\" (UID: \"3e425bd0-71ee-4b86-a246-e31d103a8745\") " pod="openstack/neutron-cdb85d7df-9hdqn" Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.037776 4899 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e425bd0-71ee-4b86-a246-e31d103a8745-combined-ca-bundle\") pod \"neutron-cdb85d7df-9hdqn\" (UID: \"3e425bd0-71ee-4b86-a246-e31d103a8745\") " pod="openstack/neutron-cdb85d7df-9hdqn" Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.037827 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86jk2\" (UniqueName: \"kubernetes.io/projected/3e425bd0-71ee-4b86-a246-e31d103a8745-kube-api-access-86jk2\") pod \"neutron-cdb85d7df-9hdqn\" (UID: \"3e425bd0-71ee-4b86-a246-e31d103a8745\") " pod="openstack/neutron-cdb85d7df-9hdqn" Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.037879 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e425bd0-71ee-4b86-a246-e31d103a8745-public-tls-certs\") pod \"neutron-cdb85d7df-9hdqn\" (UID: \"3e425bd0-71ee-4b86-a246-e31d103a8745\") " pod="openstack/neutron-cdb85d7df-9hdqn" Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.037933 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3e425bd0-71ee-4b86-a246-e31d103a8745-config\") pod \"neutron-cdb85d7df-9hdqn\" (UID: \"3e425bd0-71ee-4b86-a246-e31d103a8745\") " pod="openstack/neutron-cdb85d7df-9hdqn" Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.056762 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5dc9cdc98b-cgxhj" podStartSLOduration=3.056747074 podStartE2EDuration="3.056747074s" podCreationTimestamp="2025-10-03 08:56:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:56:45.045777597 +0000 UTC m=+979.153262540" watchObservedRunningTime="2025-10-03 08:56:45.056747074 +0000 UTC m=+979.164232027" Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.140064 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e425bd0-71ee-4b86-a246-e31d103a8745-combined-ca-bundle\") pod \"neutron-cdb85d7df-9hdqn\" (UID: \"3e425bd0-71ee-4b86-a246-e31d103a8745\") " pod="openstack/neutron-cdb85d7df-9hdqn" Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.140137 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86jk2\" (UniqueName: \"kubernetes.io/projected/3e425bd0-71ee-4b86-a246-e31d103a8745-kube-api-access-86jk2\") pod \"neutron-cdb85d7df-9hdqn\" (UID: \"3e425bd0-71ee-4b86-a246-e31d103a8745\") " pod="openstack/neutron-cdb85d7df-9hdqn" Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.140173 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e425bd0-71ee-4b86-a246-e31d103a8745-public-tls-certs\") pod \"neutron-cdb85d7df-9hdqn\" (UID: \"3e425bd0-71ee-4b86-a246-e31d103a8745\") " pod="openstack/neutron-cdb85d7df-9hdqn" Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.140231 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3e425bd0-71ee-4b86-a246-e31d103a8745-config\") pod \"neutron-cdb85d7df-9hdqn\" (UID: 
\"3e425bd0-71ee-4b86-a246-e31d103a8745\") " pod="openstack/neutron-cdb85d7df-9hdqn" Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.140269 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e425bd0-71ee-4b86-a246-e31d103a8745-ovndb-tls-certs\") pod \"neutron-cdb85d7df-9hdqn\" (UID: \"3e425bd0-71ee-4b86-a246-e31d103a8745\") " pod="openstack/neutron-cdb85d7df-9hdqn" Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.140330 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e425bd0-71ee-4b86-a246-e31d103a8745-internal-tls-certs\") pod \"neutron-cdb85d7df-9hdqn\" (UID: \"3e425bd0-71ee-4b86-a246-e31d103a8745\") " pod="openstack/neutron-cdb85d7df-9hdqn" Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.140355 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3e425bd0-71ee-4b86-a246-e31d103a8745-httpd-config\") pod \"neutron-cdb85d7df-9hdqn\" (UID: \"3e425bd0-71ee-4b86-a246-e31d103a8745\") " pod="openstack/neutron-cdb85d7df-9hdqn" Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.149745 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/3e425bd0-71ee-4b86-a246-e31d103a8745-config\") pod \"neutron-cdb85d7df-9hdqn\" (UID: \"3e425bd0-71ee-4b86-a246-e31d103a8745\") " pod="openstack/neutron-cdb85d7df-9hdqn" Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.150012 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e425bd0-71ee-4b86-a246-e31d103a8745-public-tls-certs\") pod \"neutron-cdb85d7df-9hdqn\" (UID: \"3e425bd0-71ee-4b86-a246-e31d103a8745\") " pod="openstack/neutron-cdb85d7df-9hdqn" Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.151503 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e425bd0-71ee-4b86-a246-e31d103a8745-combined-ca-bundle\") pod \"neutron-cdb85d7df-9hdqn\" (UID: \"3e425bd0-71ee-4b86-a246-e31d103a8745\") " pod="openstack/neutron-cdb85d7df-9hdqn" Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.174531 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3e425bd0-71ee-4b86-a246-e31d103a8745-httpd-config\") pod \"neutron-cdb85d7df-9hdqn\" (UID: \"3e425bd0-71ee-4b86-a246-e31d103a8745\") " pod="openstack/neutron-cdb85d7df-9hdqn" Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.176870 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e425bd0-71ee-4b86-a246-e31d103a8745-internal-tls-certs\") pod \"neutron-cdb85d7df-9hdqn\" (UID: \"3e425bd0-71ee-4b86-a246-e31d103a8745\") " pod="openstack/neutron-cdb85d7df-9hdqn" Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.177452 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e425bd0-71ee-4b86-a246-e31d103a8745-ovndb-tls-certs\") pod \"neutron-cdb85d7df-9hdqn\" (UID: \"3e425bd0-71ee-4b86-a246-e31d103a8745\") " pod="openstack/neutron-cdb85d7df-9hdqn" Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.178740 4899 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-86jk2\" (UniqueName: \"kubernetes.io/projected/3e425bd0-71ee-4b86-a246-e31d103a8745-kube-api-access-86jk2\") pod \"neutron-cdb85d7df-9hdqn\" (UID: \"3e425bd0-71ee-4b86-a246-e31d103a8745\") " pod="openstack/neutron-cdb85d7df-9hdqn" Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.349334 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-cdb85d7df-9hdqn" Oct 03 08:56:45 crc kubenswrapper[4899]: I1003 08:56:45.932672 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-nj5rb" podUID="dbbacbbc-f946-45cc-abdb-0389500e5c19" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: i/o timeout" Oct 03 08:56:46 crc kubenswrapper[4899]: I1003 08:56:46.017424 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"405fb02a-9786-4dc3-a4a6-bf885f7a75fe","Type":"ContainerStarted","Data":"b3bcfd680d21dffd9a651646bb698ef6ce38b1311bd4b55221c2a36cfb915611"} Oct 03 08:56:46 crc kubenswrapper[4899]: I1003 08:56:46.022344 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"894a907e-c584-4671-9ba8-e4b1df804b5b","Type":"ContainerStarted","Data":"d8e9a0d728d0a78bc45d8f5dc222e918da9039d54b35fd14862f4518b06a1ebc"} Oct 03 08:56:46 crc kubenswrapper[4899]: I1003 08:56:46.022396 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"894a907e-c584-4671-9ba8-e4b1df804b5b","Type":"ContainerStarted","Data":"32a3e5375a1223b70531d108e09150a9f5890189799bfa1dc1526e56b37f19b1"} Oct 03 08:56:46 crc kubenswrapper[4899]: I1003 08:56:46.024740 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" event={"ID":"a50d8934-3720-4f4e-a702-bf8b43090f52","Type":"ContainerStarted","Data":"d86db3b904decf04300f016bf56997d71c802a962bc948fd94ad20deebf0cf19"} Oct 03 08:56:46 crc kubenswrapper[4899]: I1003 08:56:46.025015 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" Oct 03 08:56:46 crc kubenswrapper[4899]: I1003 08:56:46.040243 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5dc9cdc98b-cgxhj_a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73/neutron-httpd/0.log" Oct 03 08:56:46 crc kubenswrapper[4899]: I1003 08:56:46.044212 4899 generic.go:334] "Generic (PLEG): container finished" podID="a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" containerID="8dab33695bf19fbc7580080161bbece7516135f230c717999527cdddfe6c022b" exitCode=1 Oct 03 08:56:46 crc kubenswrapper[4899]: I1003 08:56:46.044456 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5dc9cdc98b-cgxhj" event={"ID":"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73","Type":"ContainerDied","Data":"8dab33695bf19fbc7580080161bbece7516135f230c717999527cdddfe6c022b"} Oct 03 08:56:46 crc kubenswrapper[4899]: I1003 08:56:46.045416 4899 scope.go:117] "RemoveContainer" containerID="8dab33695bf19fbc7580080161bbece7516135f230c717999527cdddfe6c022b" Oct 03 08:56:46 crc kubenswrapper[4899]: I1003 08:56:46.056911 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=19.056869171 podStartE2EDuration="19.056869171s" podCreationTimestamp="2025-10-03 08:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 
UTC" observedRunningTime="2025-10-03 08:56:46.051604146 +0000 UTC m=+980.159089109" watchObservedRunningTime="2025-10-03 08:56:46.056869171 +0000 UTC m=+980.164354144" Oct 03 08:56:46 crc kubenswrapper[4899]: I1003 08:56:46.117709 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" podStartSLOduration=4.11768502 podStartE2EDuration="4.11768502s" podCreationTimestamp="2025-10-03 08:56:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:56:46.106277819 +0000 UTC m=+980.213762812" watchObservedRunningTime="2025-10-03 08:56:46.11768502 +0000 UTC m=+980.225169973" Oct 03 08:56:46 crc kubenswrapper[4899]: I1003 08:56:46.682805 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-cdb85d7df-9hdqn"] Oct 03 08:56:47 crc kubenswrapper[4899]: I1003 08:56:47.079741 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-cdb85d7df-9hdqn" event={"ID":"3e425bd0-71ee-4b86-a246-e31d103a8745","Type":"ContainerStarted","Data":"e436e252351dc8dfe4c7af083cb7c3e7250c484113cac770551c2c4d037203b1"} Oct 03 08:56:48 crc kubenswrapper[4899]: I1003 08:56:48.090595 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5dc9cdc98b-cgxhj_a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73/neutron-httpd/1.log" Oct 03 08:56:48 crc kubenswrapper[4899]: I1003 08:56:48.091406 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5dc9cdc98b-cgxhj_a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73/neutron-httpd/0.log" Oct 03 08:56:48 crc kubenswrapper[4899]: I1003 08:56:48.091706 4899 generic.go:334] "Generic (PLEG): container finished" podID="a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" containerID="77870a2057bf43277803181b734abba5725d1f143ab9c8bf28274f3988793769" exitCode=1 Oct 03 08:56:48 crc kubenswrapper[4899]: I1003 08:56:48.091770 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5dc9cdc98b-cgxhj" event={"ID":"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73","Type":"ContainerDied","Data":"77870a2057bf43277803181b734abba5725d1f143ab9c8bf28274f3988793769"} Oct 03 08:56:48 crc kubenswrapper[4899]: I1003 08:56:48.091818 4899 scope.go:117] "RemoveContainer" containerID="8dab33695bf19fbc7580080161bbece7516135f230c717999527cdddfe6c022b" Oct 03 08:56:48 crc kubenswrapper[4899]: I1003 08:56:48.092851 4899 scope.go:117] "RemoveContainer" containerID="77870a2057bf43277803181b734abba5725d1f143ab9c8bf28274f3988793769" Oct 03 08:56:48 crc kubenswrapper[4899]: E1003 08:56:48.093169 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"neutron-httpd\" with CrashLoopBackOff: \"back-off 10s restarting failed container=neutron-httpd pod=neutron-5dc9cdc98b-cgxhj_openstack(a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73)\"" pod="openstack/neutron-5dc9cdc98b-cgxhj" podUID="a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" Oct 03 08:56:48 crc kubenswrapper[4899]: I1003 08:56:48.099191 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"405fb02a-9786-4dc3-a4a6-bf885f7a75fe","Type":"ContainerStarted","Data":"acbded3a0caa4a0ee411e002a31288813cc64cc755b63cb8ad83dfd1ea64b4fc"} Oct 03 08:56:48 crc kubenswrapper[4899]: I1003 08:56:48.101448 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"8dc2bbb6-802d-4713-935a-353136d48619","Type":"ContainerStarted","Data":"f0400fdc909b6bad5c8efce24af3f745da00e6fe9bef5a7559235ed9bfad53aa"} Oct 03 08:56:48 crc kubenswrapper[4899]: I1003 08:56:48.105408 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-cdb85d7df-9hdqn" event={"ID":"3e425bd0-71ee-4b86-a246-e31d103a8745","Type":"ContainerStarted","Data":"38ec3ab6f42124ad6e922e49632122ee33e13fdbda590b227d727cb934d37e77"} Oct 03 08:56:48 crc kubenswrapper[4899]: I1003 08:56:48.105541 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-cdb85d7df-9hdqn" event={"ID":"3e425bd0-71ee-4b86-a246-e31d103a8745","Type":"ContainerStarted","Data":"df1fb9c313b6a371dae4039701217c80eafb52cccbf0c70b660b44645a5fca82"} Oct 03 08:56:48 crc kubenswrapper[4899]: I1003 08:56:48.106091 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-cdb85d7df-9hdqn" Oct 03 08:56:48 crc kubenswrapper[4899]: I1003 08:56:48.141398 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-cdb85d7df-9hdqn" podStartSLOduration=4.141379965 podStartE2EDuration="4.141379965s" podCreationTimestamp="2025-10-03 08:56:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:56:48.139598099 +0000 UTC m=+982.247083052" watchObservedRunningTime="2025-10-03 08:56:48.141379965 +0000 UTC m=+982.248864918" Oct 03 08:56:48 crc kubenswrapper[4899]: I1003 08:56:48.165854 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=7.165830316 podStartE2EDuration="7.165830316s" podCreationTimestamp="2025-10-03 08:56:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:56:48.163415791 +0000 UTC m=+982.270900754" watchObservedRunningTime="2025-10-03 08:56:48.165830316 +0000 UTC m=+982.273315269" Oct 03 08:56:48 crc kubenswrapper[4899]: I1003 08:56:48.246708 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 03 08:56:48 crc kubenswrapper[4899]: I1003 08:56:48.247053 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 03 08:56:48 crc kubenswrapper[4899]: I1003 08:56:48.286041 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 03 08:56:48 crc kubenswrapper[4899]: I1003 08:56:48.332549 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 03 08:56:49 crc kubenswrapper[4899]: I1003 08:56:49.148524 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5dc9cdc98b-cgxhj_a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73/neutron-httpd/1.log" Oct 03 08:56:49 crc kubenswrapper[4899]: I1003 08:56:49.153690 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 03 08:56:49 crc kubenswrapper[4899]: I1003 08:56:49.153937 4899 scope.go:117] "RemoveContainer" containerID="77870a2057bf43277803181b734abba5725d1f143ab9c8bf28274f3988793769" Oct 03 08:56:49 crc kubenswrapper[4899]: I1003 08:56:49.154387 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/glance-default-internal-api-0" Oct 03 08:56:49 crc kubenswrapper[4899]: E1003 08:56:49.154619 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"neutron-httpd\" with CrashLoopBackOff: \"back-off 10s restarting failed container=neutron-httpd pod=neutron-5dc9cdc98b-cgxhj_openstack(a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73)\"" pod="openstack/neutron-5dc9cdc98b-cgxhj" podUID="a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" Oct 03 08:56:49 crc kubenswrapper[4899]: E1003 08:56:49.278405 4899 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod194b2a4b_c8aa_4590_a400_f54b03904ecf.slice/crio-0884fff84a52ad2b22541251c235b6dd4fd12d11c778d61d51d5c2e38bf2da94.scope\": RecentStats: unable to find data in memory cache]" Oct 03 08:56:50 crc kubenswrapper[4899]: I1003 08:56:50.187526 4899 generic.go:334] "Generic (PLEG): container finished" podID="194b2a4b-c8aa-4590-a400-f54b03904ecf" containerID="0884fff84a52ad2b22541251c235b6dd4fd12d11c778d61d51d5c2e38bf2da94" exitCode=0 Oct 03 08:56:50 crc kubenswrapper[4899]: I1003 08:56:50.187821 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5rpwq" event={"ID":"194b2a4b-c8aa-4590-a400-f54b03904ecf","Type":"ContainerDied","Data":"0884fff84a52ad2b22541251c235b6dd4fd12d11c778d61d51d5c2e38bf2da94"} Oct 03 08:56:50 crc kubenswrapper[4899]: I1003 08:56:50.191490 4899 generic.go:334] "Generic (PLEG): container finished" podID="5d0954fe-c339-493b-a2ca-5d30b54bc603" containerID="4e06dd4031a4a05ffb46e1fd96867356a33a50e1fea760358ba333e12f578a99" exitCode=0 Oct 03 08:56:50 crc kubenswrapper[4899]: I1003 08:56:50.191533 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jf5f9" event={"ID":"5d0954fe-c339-493b-a2ca-5d30b54bc603","Type":"ContainerDied","Data":"4e06dd4031a4a05ffb46e1fd96867356a33a50e1fea760358ba333e12f578a99"} Oct 03 08:56:50 crc kubenswrapper[4899]: I1003 08:56:50.193928 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-69lhb" event={"ID":"4663e6c0-d0e2-49f9-a457-8bc02fefa635","Type":"ContainerStarted","Data":"73484b2bfe6b3bff6a4cb1f73e2971c4f032021f044f264469f6546e5686573b"} Oct 03 08:56:50 crc kubenswrapper[4899]: I1003 08:56:50.686039 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-58b885464f-hm46m" Oct 03 08:56:50 crc kubenswrapper[4899]: I1003 08:56:50.878870 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:56:50 crc kubenswrapper[4899]: I1003 08:56:50.879759 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:56:51 crc kubenswrapper[4899]: I1003 08:56:51.052189 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7f5ccd89b4-5dfm2" Oct 03 08:56:51 crc kubenswrapper[4899]: I1003 08:56:51.053053 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7f5ccd89b4-5dfm2" Oct 03 08:56:51 crc kubenswrapper[4899]: I1003 08:56:51.201178 4899 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 08:56:51 crc kubenswrapper[4899]: I1003 08:56:51.255836 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-78dbfb6845-276zt" Oct 03 08:56:51 crc kubenswrapper[4899]: 
I1003 08:56:51.878975 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 03 08:56:51 crc kubenswrapper[4899]: I1003 08:56:51.916295 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-69lhb" podStartSLOduration=7.805337091 podStartE2EDuration="41.916278604s" podCreationTimestamp="2025-10-03 08:56:10 +0000 UTC" firstStartedPulling="2025-10-03 08:56:14.970011511 +0000 UTC m=+949.077496464" lastFinishedPulling="2025-10-03 08:56:49.080953024 +0000 UTC m=+983.188437977" observedRunningTime="2025-10-03 08:56:50.255903366 +0000 UTC m=+984.363388329" watchObservedRunningTime="2025-10-03 08:56:51.916278604 +0000 UTC m=+986.023763557" Oct 03 08:56:52 crc kubenswrapper[4899]: I1003 08:56:52.097158 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 03 08:56:52 crc kubenswrapper[4899]: I1003 08:56:52.097225 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 03 08:56:52 crc kubenswrapper[4899]: I1003 08:56:52.145058 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 03 08:56:52 crc kubenswrapper[4899]: I1003 08:56:52.159424 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 03 08:56:52 crc kubenswrapper[4899]: I1003 08:56:52.214142 4899 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 08:56:52 crc kubenswrapper[4899]: I1003 08:56:52.215085 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 03 08:56:52 crc kubenswrapper[4899]: I1003 08:56:52.215678 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 03 08:56:52 crc kubenswrapper[4899]: I1003 08:56:52.749172 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 03 08:56:53 crc kubenswrapper[4899]: I1003 08:56:53.048697 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" Oct 03 08:56:53 crc kubenswrapper[4899]: I1003 08:56:53.128915 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-rp67d"] Oct 03 08:56:53 crc kubenswrapper[4899]: I1003 08:56:53.129440 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" podUID="a332aa78-c64e-46f2-b2a0-6cf8be20fe4c" containerName="dnsmasq-dns" containerID="cri-o://262e63396b2686aaaf8ba54c61164ff36f2bc5c2eee7937e8a9d7011a99db162" gracePeriod=10 Oct 03 08:56:53 crc kubenswrapper[4899]: I1003 08:56:53.234157 4899 generic.go:334] "Generic (PLEG): container finished" podID="4663e6c0-d0e2-49f9-a457-8bc02fefa635" containerID="73484b2bfe6b3bff6a4cb1f73e2971c4f032021f044f264469f6546e5686573b" exitCode=0 Oct 03 08:56:53 crc kubenswrapper[4899]: I1003 08:56:53.234458 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-69lhb" event={"ID":"4663e6c0-d0e2-49f9-a457-8bc02fefa635","Type":"ContainerDied","Data":"73484b2bfe6b3bff6a4cb1f73e2971c4f032021f044f264469f6546e5686573b"} Oct 03 08:56:54 crc kubenswrapper[4899]: I1003 08:56:54.246957 4899 generic.go:334] "Generic (PLEG): 
container finished" podID="a332aa78-c64e-46f2-b2a0-6cf8be20fe4c" containerID="262e63396b2686aaaf8ba54c61164ff36f2bc5c2eee7937e8a9d7011a99db162" exitCode=0 Oct 03 08:56:54 crc kubenswrapper[4899]: I1003 08:56:54.247039 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" event={"ID":"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c","Type":"ContainerDied","Data":"262e63396b2686aaaf8ba54c61164ff36f2bc5c2eee7937e8a9d7011a99db162"} Oct 03 08:56:54 crc kubenswrapper[4899]: I1003 08:56:54.247330 4899 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 08:56:54 crc kubenswrapper[4899]: I1003 08:56:54.247340 4899 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 08:56:54 crc kubenswrapper[4899]: I1003 08:56:54.872761 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 03 08:56:55 crc kubenswrapper[4899]: I1003 08:56:55.256282 4899 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 08:56:55 crc kubenswrapper[4899]: I1003 08:56:55.335134 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 03 08:56:56 crc kubenswrapper[4899]: I1003 08:56:56.165725 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" podUID="a332aa78-c64e-46f2-b2a0-6cf8be20fe4c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.140:5353: connect: connection refused" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.309988 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-5rpwq" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.311156 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jf5f9" event={"ID":"5d0954fe-c339-493b-a2ca-5d30b54bc603","Type":"ContainerDied","Data":"edcfe3cbf300022194c92c2212fdc85974a5e91c2994791b2d711a07b397fc1b"} Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.311190 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="edcfe3cbf300022194c92c2212fdc85974a5e91c2994791b2d711a07b397fc1b" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.312692 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-69lhb" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.312871 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-69lhb" event={"ID":"4663e6c0-d0e2-49f9-a457-8bc02fefa635","Type":"ContainerDied","Data":"6601fbcb359a4f9e20e53a965724a9de1be5d88bd9cef04d2ec14058931b60fc"} Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.312918 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6601fbcb359a4f9e20e53a965724a9de1be5d88bd9cef04d2ec14058931b60fc" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.314290 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5rpwq" event={"ID":"194b2a4b-c8aa-4590-a400-f54b03904ecf","Type":"ContainerDied","Data":"2198f838d3515e5768ce0dfb700de25ee14da315ddabd1974295d25d7b6ed79a"} Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.314329 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2198f838d3515e5768ce0dfb700de25ee14da315ddabd1974295d25d7b6ed79a" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.314330 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-5rpwq" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.343989 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-jf5f9" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.389498 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-grmzz\" (UniqueName: \"kubernetes.io/projected/5d0954fe-c339-493b-a2ca-5d30b54bc603-kube-api-access-grmzz\") pod \"5d0954fe-c339-493b-a2ca-5d30b54bc603\" (UID: \"5d0954fe-c339-493b-a2ca-5d30b54bc603\") " Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.389576 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8x4h9\" (UniqueName: \"kubernetes.io/projected/4663e6c0-d0e2-49f9-a457-8bc02fefa635-kube-api-access-8x4h9\") pod \"4663e6c0-d0e2-49f9-a457-8bc02fefa635\" (UID: \"4663e6c0-d0e2-49f9-a457-8bc02fefa635\") " Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.389611 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5d0954fe-c339-493b-a2ca-5d30b54bc603-db-sync-config-data\") pod \"5d0954fe-c339-493b-a2ca-5d30b54bc603\" (UID: \"5d0954fe-c339-493b-a2ca-5d30b54bc603\") " Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.389665 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2b8wv\" (UniqueName: \"kubernetes.io/projected/194b2a4b-c8aa-4590-a400-f54b03904ecf-kube-api-access-2b8wv\") pod \"194b2a4b-c8aa-4590-a400-f54b03904ecf\" (UID: \"194b2a4b-c8aa-4590-a400-f54b03904ecf\") " Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.389691 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4663e6c0-d0e2-49f9-a457-8bc02fefa635-config-data\") pod \"4663e6c0-d0e2-49f9-a457-8bc02fefa635\" (UID: \"4663e6c0-d0e2-49f9-a457-8bc02fefa635\") " Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.389714 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-scripts\") pod 
\"194b2a4b-c8aa-4590-a400-f54b03904ecf\" (UID: \"194b2a4b-c8aa-4590-a400-f54b03904ecf\") " Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.389762 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-fernet-keys\") pod \"194b2a4b-c8aa-4590-a400-f54b03904ecf\" (UID: \"194b2a4b-c8aa-4590-a400-f54b03904ecf\") " Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.389796 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-combined-ca-bundle\") pod \"194b2a4b-c8aa-4590-a400-f54b03904ecf\" (UID: \"194b2a4b-c8aa-4590-a400-f54b03904ecf\") " Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.389824 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-config-data\") pod \"194b2a4b-c8aa-4590-a400-f54b03904ecf\" (UID: \"194b2a4b-c8aa-4590-a400-f54b03904ecf\") " Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.389853 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4663e6c0-d0e2-49f9-a457-8bc02fefa635-logs\") pod \"4663e6c0-d0e2-49f9-a457-8bc02fefa635\" (UID: \"4663e6c0-d0e2-49f9-a457-8bc02fefa635\") " Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.389876 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4663e6c0-d0e2-49f9-a457-8bc02fefa635-scripts\") pod \"4663e6c0-d0e2-49f9-a457-8bc02fefa635\" (UID: \"4663e6c0-d0e2-49f9-a457-8bc02fefa635\") " Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.389961 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4663e6c0-d0e2-49f9-a457-8bc02fefa635-combined-ca-bundle\") pod \"4663e6c0-d0e2-49f9-a457-8bc02fefa635\" (UID: \"4663e6c0-d0e2-49f9-a457-8bc02fefa635\") " Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.390001 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-credential-keys\") pod \"194b2a4b-c8aa-4590-a400-f54b03904ecf\" (UID: \"194b2a4b-c8aa-4590-a400-f54b03904ecf\") " Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.390044 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d0954fe-c339-493b-a2ca-5d30b54bc603-combined-ca-bundle\") pod \"5d0954fe-c339-493b-a2ca-5d30b54bc603\" (UID: \"5d0954fe-c339-493b-a2ca-5d30b54bc603\") " Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.393318 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4663e6c0-d0e2-49f9-a457-8bc02fefa635-logs" (OuterVolumeSpecName: "logs") pod "4663e6c0-d0e2-49f9-a457-8bc02fefa635" (UID: "4663e6c0-d0e2-49f9-a457-8bc02fefa635"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.418201 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4663e6c0-d0e2-49f9-a457-8bc02fefa635-kube-api-access-8x4h9" (OuterVolumeSpecName: "kube-api-access-8x4h9") pod "4663e6c0-d0e2-49f9-a457-8bc02fefa635" (UID: "4663e6c0-d0e2-49f9-a457-8bc02fefa635"). InnerVolumeSpecName "kube-api-access-8x4h9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.418354 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "194b2a4b-c8aa-4590-a400-f54b03904ecf" (UID: "194b2a4b-c8aa-4590-a400-f54b03904ecf"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.422154 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d0954fe-c339-493b-a2ca-5d30b54bc603-kube-api-access-grmzz" (OuterVolumeSpecName: "kube-api-access-grmzz") pod "5d0954fe-c339-493b-a2ca-5d30b54bc603" (UID: "5d0954fe-c339-493b-a2ca-5d30b54bc603"). InnerVolumeSpecName "kube-api-access-grmzz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.422271 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-scripts" (OuterVolumeSpecName: "scripts") pod "194b2a4b-c8aa-4590-a400-f54b03904ecf" (UID: "194b2a4b-c8aa-4590-a400-f54b03904ecf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.424073 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/194b2a4b-c8aa-4590-a400-f54b03904ecf-kube-api-access-2b8wv" (OuterVolumeSpecName: "kube-api-access-2b8wv") pod "194b2a4b-c8aa-4590-a400-f54b03904ecf" (UID: "194b2a4b-c8aa-4590-a400-f54b03904ecf"). InnerVolumeSpecName "kube-api-access-2b8wv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.424080 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d0954fe-c339-493b-a2ca-5d30b54bc603-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "5d0954fe-c339-493b-a2ca-5d30b54bc603" (UID: "5d0954fe-c339-493b-a2ca-5d30b54bc603"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.446081 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4663e6c0-d0e2-49f9-a457-8bc02fefa635-scripts" (OuterVolumeSpecName: "scripts") pod "4663e6c0-d0e2-49f9-a457-8bc02fefa635" (UID: "4663e6c0-d0e2-49f9-a457-8bc02fefa635"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.471454 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "194b2a4b-c8aa-4590-a400-f54b03904ecf" (UID: "194b2a4b-c8aa-4590-a400-f54b03904ecf"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.481426 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-config-data" (OuterVolumeSpecName: "config-data") pod "194b2a4b-c8aa-4590-a400-f54b03904ecf" (UID: "194b2a4b-c8aa-4590-a400-f54b03904ecf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.494291 4899 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.494322 4899 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.494332 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.494341 4899 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4663e6c0-d0e2-49f9-a457-8bc02fefa635-logs\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.494352 4899 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4663e6c0-d0e2-49f9-a457-8bc02fefa635-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.494361 4899 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.494372 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-grmzz\" (UniqueName: \"kubernetes.io/projected/5d0954fe-c339-493b-a2ca-5d30b54bc603-kube-api-access-grmzz\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.494380 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8x4h9\" (UniqueName: \"kubernetes.io/projected/4663e6c0-d0e2-49f9-a457-8bc02fefa635-kube-api-access-8x4h9\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.494394 4899 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5d0954fe-c339-493b-a2ca-5d30b54bc603-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.494405 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2b8wv\" (UniqueName: \"kubernetes.io/projected/194b2a4b-c8aa-4590-a400-f54b03904ecf-kube-api-access-2b8wv\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.494528 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4663e6c0-d0e2-49f9-a457-8bc02fefa635-config-data" (OuterVolumeSpecName: "config-data") pod "4663e6c0-d0e2-49f9-a457-8bc02fefa635" (UID: "4663e6c0-d0e2-49f9-a457-8bc02fefa635"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.509941 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d0954fe-c339-493b-a2ca-5d30b54bc603-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5d0954fe-c339-493b-a2ca-5d30b54bc603" (UID: "5d0954fe-c339-493b-a2ca-5d30b54bc603"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.528841 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "194b2a4b-c8aa-4590-a400-f54b03904ecf" (UID: "194b2a4b-c8aa-4590-a400-f54b03904ecf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.556354 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4663e6c0-d0e2-49f9-a457-8bc02fefa635-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4663e6c0-d0e2-49f9-a457-8bc02fefa635" (UID: "4663e6c0-d0e2-49f9-a457-8bc02fefa635"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.596413 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4663e6c0-d0e2-49f9-a457-8bc02fefa635-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.596453 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/194b2a4b-c8aa-4590-a400-f54b03904ecf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.596469 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4663e6c0-d0e2-49f9-a457-8bc02fefa635-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.596481 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d0954fe-c339-493b-a2ca-5d30b54bc603-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.600832 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.697992 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-dns-svc\") pod \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\" (UID: \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\") " Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.698289 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-ovsdbserver-sb\") pod \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\" (UID: \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\") " Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.698329 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-config\") pod \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\" (UID: \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\") " Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.698721 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g5zt7\" (UniqueName: \"kubernetes.io/projected/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-kube-api-access-g5zt7\") pod \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\" (UID: \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\") " Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.698774 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-ovsdbserver-nb\") pod \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\" (UID: \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\") " Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.698814 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-dns-swift-storage-0\") pod \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\" (UID: \"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c\") " Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.705735 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-kube-api-access-g5zt7" (OuterVolumeSpecName: "kube-api-access-g5zt7") pod "a332aa78-c64e-46f2-b2a0-6cf8be20fe4c" (UID: "a332aa78-c64e-46f2-b2a0-6cf8be20fe4c"). InnerVolumeSpecName "kube-api-access-g5zt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.764229 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a332aa78-c64e-46f2-b2a0-6cf8be20fe4c" (UID: "a332aa78-c64e-46f2-b2a0-6cf8be20fe4c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.770318 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "a332aa78-c64e-46f2-b2a0-6cf8be20fe4c" (UID: "a332aa78-c64e-46f2-b2a0-6cf8be20fe4c"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.779384 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a332aa78-c64e-46f2-b2a0-6cf8be20fe4c" (UID: "a332aa78-c64e-46f2-b2a0-6cf8be20fe4c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.801525 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g5zt7\" (UniqueName: \"kubernetes.io/projected/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-kube-api-access-g5zt7\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.801565 4899 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.801578 4899 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.801590 4899 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.824553 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-config" (OuterVolumeSpecName: "config") pod "a332aa78-c64e-46f2-b2a0-6cf8be20fe4c" (UID: "a332aa78-c64e-46f2-b2a0-6cf8be20fe4c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.824665 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a332aa78-c64e-46f2-b2a0-6cf8be20fe4c" (UID: "a332aa78-c64e-46f2-b2a0-6cf8be20fe4c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.905072 4899 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:58 crc kubenswrapper[4899]: I1003 08:56:58.905119 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.324628 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.324612 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-rp67d" event={"ID":"a332aa78-c64e-46f2-b2a0-6cf8be20fe4c","Type":"ContainerDied","Data":"d532184f0bcfa04e8e024dd7b40287fbf8d19683beac56522136b3b2d65ac53c"} Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.325189 4899 scope.go:117] "RemoveContainer" containerID="262e63396b2686aaaf8ba54c61164ff36f2bc5c2eee7937e8a9d7011a99db162" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.330799 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-69lhb" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.331455 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8dc2bbb6-802d-4713-935a-353136d48619","Type":"ContainerStarted","Data":"1c56c9cbb6162d968d34d9685967acf15b3d6dc094495c7f157c8036a86f3a20"} Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.331544 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-jf5f9" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.363275 4899 scope.go:117] "RemoveContainer" containerID="a0c0fe18c71a26d072505ed96e8dadb3145c15aa34ac0833ef7ed22eef4db970" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.387313 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-rp67d"] Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.401348 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-rp67d"] Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.539200 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-688fdbdf8c-rnx7k"] Oct 03 08:56:59 crc kubenswrapper[4899]: E1003 08:56:59.539562 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a332aa78-c64e-46f2-b2a0-6cf8be20fe4c" containerName="init" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.539577 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="a332aa78-c64e-46f2-b2a0-6cf8be20fe4c" containerName="init" Oct 03 08:56:59 crc kubenswrapper[4899]: E1003 08:56:59.539602 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d0954fe-c339-493b-a2ca-5d30b54bc603" containerName="barbican-db-sync" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.539609 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d0954fe-c339-493b-a2ca-5d30b54bc603" containerName="barbican-db-sync" Oct 03 08:56:59 crc kubenswrapper[4899]: E1003 08:56:59.539623 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="194b2a4b-c8aa-4590-a400-f54b03904ecf" containerName="keystone-bootstrap" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.539629 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="194b2a4b-c8aa-4590-a400-f54b03904ecf" containerName="keystone-bootstrap" Oct 03 08:56:59 crc kubenswrapper[4899]: E1003 08:56:59.539639 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4663e6c0-d0e2-49f9-a457-8bc02fefa635" containerName="placement-db-sync" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.539645 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="4663e6c0-d0e2-49f9-a457-8bc02fefa635" containerName="placement-db-sync" Oct 03 08:56:59 crc kubenswrapper[4899]: E1003 08:56:59.539668 4899 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a332aa78-c64e-46f2-b2a0-6cf8be20fe4c" containerName="dnsmasq-dns" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.539675 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="a332aa78-c64e-46f2-b2a0-6cf8be20fe4c" containerName="dnsmasq-dns" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.539847 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="194b2a4b-c8aa-4590-a400-f54b03904ecf" containerName="keystone-bootstrap" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.539861 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="4663e6c0-d0e2-49f9-a457-8bc02fefa635" containerName="placement-db-sync" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.539877 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="a332aa78-c64e-46f2-b2a0-6cf8be20fe4c" containerName="dnsmasq-dns" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.539903 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d0954fe-c339-493b-a2ca-5d30b54bc603" containerName="barbican-db-sync" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.540829 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-688fdbdf8c-rnx7k" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.551867 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.552079 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-6hp7k" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.552275 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.552359 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.552483 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.552519 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.583511 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-688fdbdf8c-rnx7k"] Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.597222 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-568fd9848b-bw6ch"] Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.598743 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-568fd9848b-bw6ch" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.610193 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.610618 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.622550 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.622734 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-btcbf" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.622871 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.638314 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/36a71770-b047-4d86-96c0-2888f9258599-fernet-keys\") pod \"keystone-688fdbdf8c-rnx7k\" (UID: \"36a71770-b047-4d86-96c0-2888f9258599\") " pod="openstack/keystone-688fdbdf8c-rnx7k" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.638389 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/36a71770-b047-4d86-96c0-2888f9258599-credential-keys\") pod \"keystone-688fdbdf8c-rnx7k\" (UID: \"36a71770-b047-4d86-96c0-2888f9258599\") " pod="openstack/keystone-688fdbdf8c-rnx7k" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.638465 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/36a71770-b047-4d86-96c0-2888f9258599-public-tls-certs\") pod \"keystone-688fdbdf8c-rnx7k\" (UID: \"36a71770-b047-4d86-96c0-2888f9258599\") " pod="openstack/keystone-688fdbdf8c-rnx7k" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.638489 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/36a71770-b047-4d86-96c0-2888f9258599-internal-tls-certs\") pod \"keystone-688fdbdf8c-rnx7k\" (UID: \"36a71770-b047-4d86-96c0-2888f9258599\") " pod="openstack/keystone-688fdbdf8c-rnx7k" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.638565 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36a71770-b047-4d86-96c0-2888f9258599-combined-ca-bundle\") pod \"keystone-688fdbdf8c-rnx7k\" (UID: \"36a71770-b047-4d86-96c0-2888f9258599\") " pod="openstack/keystone-688fdbdf8c-rnx7k" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.638617 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36a71770-b047-4d86-96c0-2888f9258599-scripts\") pod \"keystone-688fdbdf8c-rnx7k\" (UID: \"36a71770-b047-4d86-96c0-2888f9258599\") " pod="openstack/keystone-688fdbdf8c-rnx7k" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.638705 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/36a71770-b047-4d86-96c0-2888f9258599-config-data\") pod \"keystone-688fdbdf8c-rnx7k\" (UID: \"36a71770-b047-4d86-96c0-2888f9258599\") " pod="openstack/keystone-688fdbdf8c-rnx7k" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.638733 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gtq4z\" (UniqueName: \"kubernetes.io/projected/36a71770-b047-4d86-96c0-2888f9258599-kube-api-access-gtq4z\") pod \"keystone-688fdbdf8c-rnx7k\" (UID: \"36a71770-b047-4d86-96c0-2888f9258599\") " pod="openstack/keystone-688fdbdf8c-rnx7k" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.638797 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73b0bcea-efbe-4c62-b97c-031ea8fee918-config-data\") pod \"placement-568fd9848b-bw6ch\" (UID: \"73b0bcea-efbe-4c62-b97c-031ea8fee918\") " pod="openstack/placement-568fd9848b-bw6ch" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.638866 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/73b0bcea-efbe-4c62-b97c-031ea8fee918-scripts\") pod \"placement-568fd9848b-bw6ch\" (UID: \"73b0bcea-efbe-4c62-b97c-031ea8fee918\") " pod="openstack/placement-568fd9848b-bw6ch" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.638949 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/73b0bcea-efbe-4c62-b97c-031ea8fee918-internal-tls-certs\") pod \"placement-568fd9848b-bw6ch\" (UID: \"73b0bcea-efbe-4c62-b97c-031ea8fee918\") " pod="openstack/placement-568fd9848b-bw6ch" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.639003 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgqr6\" (UniqueName: \"kubernetes.io/projected/73b0bcea-efbe-4c62-b97c-031ea8fee918-kube-api-access-cgqr6\") pod \"placement-568fd9848b-bw6ch\" (UID: \"73b0bcea-efbe-4c62-b97c-031ea8fee918\") " pod="openstack/placement-568fd9848b-bw6ch" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.639044 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73b0bcea-efbe-4c62-b97c-031ea8fee918-logs\") pod \"placement-568fd9848b-bw6ch\" (UID: \"73b0bcea-efbe-4c62-b97c-031ea8fee918\") " pod="openstack/placement-568fd9848b-bw6ch" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.639183 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73b0bcea-efbe-4c62-b97c-031ea8fee918-combined-ca-bundle\") pod \"placement-568fd9848b-bw6ch\" (UID: \"73b0bcea-efbe-4c62-b97c-031ea8fee918\") " pod="openstack/placement-568fd9848b-bw6ch" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.639612 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/73b0bcea-efbe-4c62-b97c-031ea8fee918-public-tls-certs\") pod \"placement-568fd9848b-bw6ch\" (UID: \"73b0bcea-efbe-4c62-b97c-031ea8fee918\") " pod="openstack/placement-568fd9848b-bw6ch" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.649386 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/placement-568fd9848b-bw6ch"] Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.700972 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-5b77574dc-vm5lv"] Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.707079 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5b77574dc-vm5lv" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.720636 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-ltqhc" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.723963 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.741305 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36a71770-b047-4d86-96c0-2888f9258599-config-data\") pod \"keystone-688fdbdf8c-rnx7k\" (UID: \"36a71770-b047-4d86-96c0-2888f9258599\") " pod="openstack/keystone-688fdbdf8c-rnx7k" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.741589 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gtq4z\" (UniqueName: \"kubernetes.io/projected/36a71770-b047-4d86-96c0-2888f9258599-kube-api-access-gtq4z\") pod \"keystone-688fdbdf8c-rnx7k\" (UID: \"36a71770-b047-4d86-96c0-2888f9258599\") " pod="openstack/keystone-688fdbdf8c-rnx7k" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.741661 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73b0bcea-efbe-4c62-b97c-031ea8fee918-config-data\") pod \"placement-568fd9848b-bw6ch\" (UID: \"73b0bcea-efbe-4c62-b97c-031ea8fee918\") " pod="openstack/placement-568fd9848b-bw6ch" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.741727 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/73b0bcea-efbe-4c62-b97c-031ea8fee918-scripts\") pod \"placement-568fd9848b-bw6ch\" (UID: \"73b0bcea-efbe-4c62-b97c-031ea8fee918\") " pod="openstack/placement-568fd9848b-bw6ch" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.741781 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/73b0bcea-efbe-4c62-b97c-031ea8fee918-internal-tls-certs\") pod \"placement-568fd9848b-bw6ch\" (UID: \"73b0bcea-efbe-4c62-b97c-031ea8fee918\") " pod="openstack/placement-568fd9848b-bw6ch" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.741861 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgqr6\" (UniqueName: \"kubernetes.io/projected/73b0bcea-efbe-4c62-b97c-031ea8fee918-kube-api-access-cgqr6\") pod \"placement-568fd9848b-bw6ch\" (UID: \"73b0bcea-efbe-4c62-b97c-031ea8fee918\") " pod="openstack/placement-568fd9848b-bw6ch" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.741922 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73b0bcea-efbe-4c62-b97c-031ea8fee918-logs\") pod \"placement-568fd9848b-bw6ch\" (UID: \"73b0bcea-efbe-4c62-b97c-031ea8fee918\") " pod="openstack/placement-568fd9848b-bw6ch" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.741965 4899 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69c8959c-64e4-43a6-9b2c-133dd960fc67-combined-ca-bundle\") pod \"barbican-worker-5b77574dc-vm5lv\" (UID: \"69c8959c-64e4-43a6-9b2c-133dd960fc67\") " pod="openstack/barbican-worker-5b77574dc-vm5lv" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.742034 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/69c8959c-64e4-43a6-9b2c-133dd960fc67-config-data-custom\") pod \"barbican-worker-5b77574dc-vm5lv\" (UID: \"69c8959c-64e4-43a6-9b2c-133dd960fc67\") " pod="openstack/barbican-worker-5b77574dc-vm5lv" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.742237 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69c8959c-64e4-43a6-9b2c-133dd960fc67-logs\") pod \"barbican-worker-5b77574dc-vm5lv\" (UID: \"69c8959c-64e4-43a6-9b2c-133dd960fc67\") " pod="openstack/barbican-worker-5b77574dc-vm5lv" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.742290 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73b0bcea-efbe-4c62-b97c-031ea8fee918-combined-ca-bundle\") pod \"placement-568fd9848b-bw6ch\" (UID: \"73b0bcea-efbe-4c62-b97c-031ea8fee918\") " pod="openstack/placement-568fd9848b-bw6ch" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.742377 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/73b0bcea-efbe-4c62-b97c-031ea8fee918-public-tls-certs\") pod \"placement-568fd9848b-bw6ch\" (UID: \"73b0bcea-efbe-4c62-b97c-031ea8fee918\") " pod="openstack/placement-568fd9848b-bw6ch" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.742464 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/36a71770-b047-4d86-96c0-2888f9258599-fernet-keys\") pod \"keystone-688fdbdf8c-rnx7k\" (UID: \"36a71770-b047-4d86-96c0-2888f9258599\") " pod="openstack/keystone-688fdbdf8c-rnx7k" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.742493 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/36a71770-b047-4d86-96c0-2888f9258599-credential-keys\") pod \"keystone-688fdbdf8c-rnx7k\" (UID: \"36a71770-b047-4d86-96c0-2888f9258599\") " pod="openstack/keystone-688fdbdf8c-rnx7k" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.742516 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69c8959c-64e4-43a6-9b2c-133dd960fc67-config-data\") pod \"barbican-worker-5b77574dc-vm5lv\" (UID: \"69c8959c-64e4-43a6-9b2c-133dd960fc67\") " pod="openstack/barbican-worker-5b77574dc-vm5lv" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.742555 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/36a71770-b047-4d86-96c0-2888f9258599-public-tls-certs\") pod \"keystone-688fdbdf8c-rnx7k\" (UID: \"36a71770-b047-4d86-96c0-2888f9258599\") " pod="openstack/keystone-688fdbdf8c-rnx7k" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.742580 4899 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/36a71770-b047-4d86-96c0-2888f9258599-internal-tls-certs\") pod \"keystone-688fdbdf8c-rnx7k\" (UID: \"36a71770-b047-4d86-96c0-2888f9258599\") " pod="openstack/keystone-688fdbdf8c-rnx7k" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.756867 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrg58\" (UniqueName: \"kubernetes.io/projected/69c8959c-64e4-43a6-9b2c-133dd960fc67-kube-api-access-mrg58\") pod \"barbican-worker-5b77574dc-vm5lv\" (UID: \"69c8959c-64e4-43a6-9b2c-133dd960fc67\") " pod="openstack/barbican-worker-5b77574dc-vm5lv" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.757262 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36a71770-b047-4d86-96c0-2888f9258599-combined-ca-bundle\") pod \"keystone-688fdbdf8c-rnx7k\" (UID: \"36a71770-b047-4d86-96c0-2888f9258599\") " pod="openstack/keystone-688fdbdf8c-rnx7k" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.757310 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36a71770-b047-4d86-96c0-2888f9258599-scripts\") pod \"keystone-688fdbdf8c-rnx7k\" (UID: \"36a71770-b047-4d86-96c0-2888f9258599\") " pod="openstack/keystone-688fdbdf8c-rnx7k" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.765941 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73b0bcea-efbe-4c62-b97c-031ea8fee918-logs\") pod \"placement-568fd9848b-bw6ch\" (UID: \"73b0bcea-efbe-4c62-b97c-031ea8fee918\") " pod="openstack/placement-568fd9848b-bw6ch" Oct 03 08:56:59 crc kubenswrapper[4899]: E1003 08:56:59.771203 4899 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4663e6c0_d0e2_49f9_a457_8bc02fefa635.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4663e6c0_d0e2_49f9_a457_8bc02fefa635.slice/crio-6601fbcb359a4f9e20e53a965724a9de1be5d88bd9cef04d2ec14058931b60fc\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda332aa78_c64e_46f2_b2a0_6cf8be20fe4c.slice\": RecentStats: unable to find data in memory cache]" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.784281 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/73b0bcea-efbe-4c62-b97c-031ea8fee918-internal-tls-certs\") pod \"placement-568fd9848b-bw6ch\" (UID: \"73b0bcea-efbe-4c62-b97c-031ea8fee918\") " pod="openstack/placement-568fd9848b-bw6ch" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.788348 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/73b0bcea-efbe-4c62-b97c-031ea8fee918-scripts\") pod \"placement-568fd9848b-bw6ch\" (UID: \"73b0bcea-efbe-4c62-b97c-031ea8fee918\") " pod="openstack/placement-568fd9848b-bw6ch" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.788639 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/36a71770-b047-4d86-96c0-2888f9258599-credential-keys\") pod \"keystone-688fdbdf8c-rnx7k\" (UID: \"36a71770-b047-4d86-96c0-2888f9258599\") " pod="openstack/keystone-688fdbdf8c-rnx7k" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.789173 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36a71770-b047-4d86-96c0-2888f9258599-scripts\") pod \"keystone-688fdbdf8c-rnx7k\" (UID: \"36a71770-b047-4d86-96c0-2888f9258599\") " pod="openstack/keystone-688fdbdf8c-rnx7k" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.789975 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/36a71770-b047-4d86-96c0-2888f9258599-public-tls-certs\") pod \"keystone-688fdbdf8c-rnx7k\" (UID: \"36a71770-b047-4d86-96c0-2888f9258599\") " pod="openstack/keystone-688fdbdf8c-rnx7k" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.790255 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.790855 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73b0bcea-efbe-4c62-b97c-031ea8fee918-combined-ca-bundle\") pod \"placement-568fd9848b-bw6ch\" (UID: \"73b0bcea-efbe-4c62-b97c-031ea8fee918\") " pod="openstack/placement-568fd9848b-bw6ch" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.791639 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/73b0bcea-efbe-4c62-b97c-031ea8fee918-public-tls-certs\") pod \"placement-568fd9848b-bw6ch\" (UID: \"73b0bcea-efbe-4c62-b97c-031ea8fee918\") " pod="openstack/placement-568fd9848b-bw6ch" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.791639 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/36a71770-b047-4d86-96c0-2888f9258599-internal-tls-certs\") pod \"keystone-688fdbdf8c-rnx7k\" (UID: \"36a71770-b047-4d86-96c0-2888f9258599\") " pod="openstack/keystone-688fdbdf8c-rnx7k" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.792270 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/36a71770-b047-4d86-96c0-2888f9258599-fernet-keys\") pod \"keystone-688fdbdf8c-rnx7k\" (UID: \"36a71770-b047-4d86-96c0-2888f9258599\") " pod="openstack/keystone-688fdbdf8c-rnx7k" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.792727 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36a71770-b047-4d86-96c0-2888f9258599-combined-ca-bundle\") pod \"keystone-688fdbdf8c-rnx7k\" (UID: \"36a71770-b047-4d86-96c0-2888f9258599\") " pod="openstack/keystone-688fdbdf8c-rnx7k" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.798197 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36a71770-b047-4d86-96c0-2888f9258599-config-data\") pod \"keystone-688fdbdf8c-rnx7k\" (UID: \"36a71770-b047-4d86-96c0-2888f9258599\") " pod="openstack/keystone-688fdbdf8c-rnx7k" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.800702 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/73b0bcea-efbe-4c62-b97c-031ea8fee918-config-data\") pod \"placement-568fd9848b-bw6ch\" (UID: \"73b0bcea-efbe-4c62-b97c-031ea8fee918\") " pod="openstack/placement-568fd9848b-bw6ch" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.815148 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-59465fcb84-kkbzz"] Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.833403 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-59465fcb84-kkbzz" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.843801 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgqr6\" (UniqueName: \"kubernetes.io/projected/73b0bcea-efbe-4c62-b97c-031ea8fee918-kube-api-access-cgqr6\") pod \"placement-568fd9848b-bw6ch\" (UID: \"73b0bcea-efbe-4c62-b97c-031ea8fee918\") " pod="openstack/placement-568fd9848b-bw6ch" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.844027 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.853694 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5b77574dc-vm5lv"] Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.859915 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69c8959c-64e4-43a6-9b2c-133dd960fc67-combined-ca-bundle\") pod \"barbican-worker-5b77574dc-vm5lv\" (UID: \"69c8959c-64e4-43a6-9b2c-133dd960fc67\") " pod="openstack/barbican-worker-5b77574dc-vm5lv" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.859971 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/69c8959c-64e4-43a6-9b2c-133dd960fc67-config-data-custom\") pod \"barbican-worker-5b77574dc-vm5lv\" (UID: \"69c8959c-64e4-43a6-9b2c-133dd960fc67\") " pod="openstack/barbican-worker-5b77574dc-vm5lv" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.860005 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69c8959c-64e4-43a6-9b2c-133dd960fc67-logs\") pod \"barbican-worker-5b77574dc-vm5lv\" (UID: \"69c8959c-64e4-43a6-9b2c-133dd960fc67\") " pod="openstack/barbican-worker-5b77574dc-vm5lv" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.860063 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69c8959c-64e4-43a6-9b2c-133dd960fc67-config-data\") pod \"barbican-worker-5b77574dc-vm5lv\" (UID: \"69c8959c-64e4-43a6-9b2c-133dd960fc67\") " pod="openstack/barbican-worker-5b77574dc-vm5lv" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.860115 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrg58\" (UniqueName: \"kubernetes.io/projected/69c8959c-64e4-43a6-9b2c-133dd960fc67-kube-api-access-mrg58\") pod \"barbican-worker-5b77574dc-vm5lv\" (UID: \"69c8959c-64e4-43a6-9b2c-133dd960fc67\") " pod="openstack/barbican-worker-5b77574dc-vm5lv" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.861131 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69c8959c-64e4-43a6-9b2c-133dd960fc67-logs\") pod 
\"barbican-worker-5b77574dc-vm5lv\" (UID: \"69c8959c-64e4-43a6-9b2c-133dd960fc67\") " pod="openstack/barbican-worker-5b77574dc-vm5lv" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.868407 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gtq4z\" (UniqueName: \"kubernetes.io/projected/36a71770-b047-4d86-96c0-2888f9258599-kube-api-access-gtq4z\") pod \"keystone-688fdbdf8c-rnx7k\" (UID: \"36a71770-b047-4d86-96c0-2888f9258599\") " pod="openstack/keystone-688fdbdf8c-rnx7k" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.879342 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69c8959c-64e4-43a6-9b2c-133dd960fc67-config-data\") pod \"barbican-worker-5b77574dc-vm5lv\" (UID: \"69c8959c-64e4-43a6-9b2c-133dd960fc67\") " pod="openstack/barbican-worker-5b77574dc-vm5lv" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.901555 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/69c8959c-64e4-43a6-9b2c-133dd960fc67-config-data-custom\") pod \"barbican-worker-5b77574dc-vm5lv\" (UID: \"69c8959c-64e4-43a6-9b2c-133dd960fc67\") " pod="openstack/barbican-worker-5b77574dc-vm5lv" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.905293 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-688fdbdf8c-rnx7k" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.909813 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69c8959c-64e4-43a6-9b2c-133dd960fc67-combined-ca-bundle\") pod \"barbican-worker-5b77574dc-vm5lv\" (UID: \"69c8959c-64e4-43a6-9b2c-133dd960fc67\") " pod="openstack/barbican-worker-5b77574dc-vm5lv" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.909872 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-59465fcb84-kkbzz"] Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.922329 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-29djt"] Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.935995 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrg58\" (UniqueName: \"kubernetes.io/projected/69c8959c-64e4-43a6-9b2c-133dd960fc67-kube-api-access-mrg58\") pod \"barbican-worker-5b77574dc-vm5lv\" (UID: \"69c8959c-64e4-43a6-9b2c-133dd960fc67\") " pod="openstack/barbican-worker-5b77574dc-vm5lv" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.942277 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-568fd9848b-bw6ch" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.979813 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-29djt" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.985048 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bd52669-a824-4b16-a840-2feed9e46a6c-combined-ca-bundle\") pod \"barbican-keystone-listener-59465fcb84-kkbzz\" (UID: \"0bd52669-a824-4b16-a840-2feed9e46a6c\") " pod="openstack/barbican-keystone-listener-59465fcb84-kkbzz" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.985244 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0bd52669-a824-4b16-a840-2feed9e46a6c-logs\") pod \"barbican-keystone-listener-59465fcb84-kkbzz\" (UID: \"0bd52669-a824-4b16-a840-2feed9e46a6c\") " pod="openstack/barbican-keystone-listener-59465fcb84-kkbzz" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.985392 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28jqp\" (UniqueName: \"kubernetes.io/projected/0bd52669-a824-4b16-a840-2feed9e46a6c-kube-api-access-28jqp\") pod \"barbican-keystone-listener-59465fcb84-kkbzz\" (UID: \"0bd52669-a824-4b16-a840-2feed9e46a6c\") " pod="openstack/barbican-keystone-listener-59465fcb84-kkbzz" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.985465 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0bd52669-a824-4b16-a840-2feed9e46a6c-config-data\") pod \"barbican-keystone-listener-59465fcb84-kkbzz\" (UID: \"0bd52669-a824-4b16-a840-2feed9e46a6c\") " pod="openstack/barbican-keystone-listener-59465fcb84-kkbzz" Oct 03 08:56:59 crc kubenswrapper[4899]: I1003 08:56:59.985499 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0bd52669-a824-4b16-a840-2feed9e46a6c-config-data-custom\") pod \"barbican-keystone-listener-59465fcb84-kkbzz\" (UID: \"0bd52669-a824-4b16-a840-2feed9e46a6c\") " pod="openstack/barbican-keystone-listener-59465fcb84-kkbzz" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.009920 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-29djt"] Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.032779 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-68b787dc9b-6xw72"] Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.037530 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-68b787dc9b-6xw72" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.048392 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.070791 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-68b787dc9b-6xw72"] Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.088147 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0bd52669-a824-4b16-a840-2feed9e46a6c-logs\") pod \"barbican-keystone-listener-59465fcb84-kkbzz\" (UID: \"0bd52669-a824-4b16-a840-2feed9e46a6c\") " pod="openstack/barbican-keystone-listener-59465fcb84-kkbzz" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.088194 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/987dbede-0277-4b22-b643-36d8b379542f-logs\") pod \"barbican-api-68b787dc9b-6xw72\" (UID: \"987dbede-0277-4b22-b643-36d8b379542f\") " pod="openstack/barbican-api-68b787dc9b-6xw72" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.088217 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-ovsdbserver-sb\") pod \"dnsmasq-dns-848cf88cfc-29djt\" (UID: \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\") " pod="openstack/dnsmasq-dns-848cf88cfc-29djt" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.088248 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-config\") pod \"dnsmasq-dns-848cf88cfc-29djt\" (UID: \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\") " pod="openstack/dnsmasq-dns-848cf88cfc-29djt" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.088267 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/987dbede-0277-4b22-b643-36d8b379542f-config-data\") pod \"barbican-api-68b787dc9b-6xw72\" (UID: \"987dbede-0277-4b22-b643-36d8b379542f\") " pod="openstack/barbican-api-68b787dc9b-6xw72" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.088304 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28jqp\" (UniqueName: \"kubernetes.io/projected/0bd52669-a824-4b16-a840-2feed9e46a6c-kube-api-access-28jqp\") pod \"barbican-keystone-listener-59465fcb84-kkbzz\" (UID: \"0bd52669-a824-4b16-a840-2feed9e46a6c\") " pod="openstack/barbican-keystone-listener-59465fcb84-kkbzz" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.088330 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76w8k\" (UniqueName: \"kubernetes.io/projected/987dbede-0277-4b22-b643-36d8b379542f-kube-api-access-76w8k\") pod \"barbican-api-68b787dc9b-6xw72\" (UID: \"987dbede-0277-4b22-b643-36d8b379542f\") " pod="openstack/barbican-api-68b787dc9b-6xw72" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.088357 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0bd52669-a824-4b16-a840-2feed9e46a6c-config-data\") pod \"barbican-keystone-listener-59465fcb84-kkbzz\" 
(UID: \"0bd52669-a824-4b16-a840-2feed9e46a6c\") " pod="openstack/barbican-keystone-listener-59465fcb84-kkbzz" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.088382 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0bd52669-a824-4b16-a840-2feed9e46a6c-config-data-custom\") pod \"barbican-keystone-listener-59465fcb84-kkbzz\" (UID: \"0bd52669-a824-4b16-a840-2feed9e46a6c\") " pod="openstack/barbican-keystone-listener-59465fcb84-kkbzz" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.088401 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/987dbede-0277-4b22-b643-36d8b379542f-combined-ca-bundle\") pod \"barbican-api-68b787dc9b-6xw72\" (UID: \"987dbede-0277-4b22-b643-36d8b379542f\") " pod="openstack/barbican-api-68b787dc9b-6xw72" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.088418 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-dns-svc\") pod \"dnsmasq-dns-848cf88cfc-29djt\" (UID: \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\") " pod="openstack/dnsmasq-dns-848cf88cfc-29djt" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.088444 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-ovsdbserver-nb\") pod \"dnsmasq-dns-848cf88cfc-29djt\" (UID: \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\") " pod="openstack/dnsmasq-dns-848cf88cfc-29djt" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.088495 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bd52669-a824-4b16-a840-2feed9e46a6c-combined-ca-bundle\") pod \"barbican-keystone-listener-59465fcb84-kkbzz\" (UID: \"0bd52669-a824-4b16-a840-2feed9e46a6c\") " pod="openstack/barbican-keystone-listener-59465fcb84-kkbzz" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.088559 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/987dbede-0277-4b22-b643-36d8b379542f-config-data-custom\") pod \"barbican-api-68b787dc9b-6xw72\" (UID: \"987dbede-0277-4b22-b643-36d8b379542f\") " pod="openstack/barbican-api-68b787dc9b-6xw72" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.088600 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwt8x\" (UniqueName: \"kubernetes.io/projected/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-kube-api-access-xwt8x\") pod \"dnsmasq-dns-848cf88cfc-29djt\" (UID: \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\") " pod="openstack/dnsmasq-dns-848cf88cfc-29djt" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.088618 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-dns-swift-storage-0\") pod \"dnsmasq-dns-848cf88cfc-29djt\" (UID: \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\") " pod="openstack/dnsmasq-dns-848cf88cfc-29djt" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.089288 4899 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0bd52669-a824-4b16-a840-2feed9e46a6c-logs\") pod \"barbican-keystone-listener-59465fcb84-kkbzz\" (UID: \"0bd52669-a824-4b16-a840-2feed9e46a6c\") " pod="openstack/barbican-keystone-listener-59465fcb84-kkbzz" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.095770 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bd52669-a824-4b16-a840-2feed9e46a6c-combined-ca-bundle\") pod \"barbican-keystone-listener-59465fcb84-kkbzz\" (UID: \"0bd52669-a824-4b16-a840-2feed9e46a6c\") " pod="openstack/barbican-keystone-listener-59465fcb84-kkbzz" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.096395 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0bd52669-a824-4b16-a840-2feed9e46a6c-config-data-custom\") pod \"barbican-keystone-listener-59465fcb84-kkbzz\" (UID: \"0bd52669-a824-4b16-a840-2feed9e46a6c\") " pod="openstack/barbican-keystone-listener-59465fcb84-kkbzz" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.107546 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0bd52669-a824-4b16-a840-2feed9e46a6c-config-data\") pod \"barbican-keystone-listener-59465fcb84-kkbzz\" (UID: \"0bd52669-a824-4b16-a840-2feed9e46a6c\") " pod="openstack/barbican-keystone-listener-59465fcb84-kkbzz" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.111694 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28jqp\" (UniqueName: \"kubernetes.io/projected/0bd52669-a824-4b16-a840-2feed9e46a6c-kube-api-access-28jqp\") pod \"barbican-keystone-listener-59465fcb84-kkbzz\" (UID: \"0bd52669-a824-4b16-a840-2feed9e46a6c\") " pod="openstack/barbican-keystone-listener-59465fcb84-kkbzz" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.190854 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwt8x\" (UniqueName: \"kubernetes.io/projected/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-kube-api-access-xwt8x\") pod \"dnsmasq-dns-848cf88cfc-29djt\" (UID: \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\") " pod="openstack/dnsmasq-dns-848cf88cfc-29djt" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.190938 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-dns-swift-storage-0\") pod \"dnsmasq-dns-848cf88cfc-29djt\" (UID: \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\") " pod="openstack/dnsmasq-dns-848cf88cfc-29djt" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.191115 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/987dbede-0277-4b22-b643-36d8b379542f-logs\") pod \"barbican-api-68b787dc9b-6xw72\" (UID: \"987dbede-0277-4b22-b643-36d8b379542f\") " pod="openstack/barbican-api-68b787dc9b-6xw72" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.191182 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-ovsdbserver-sb\") pod \"dnsmasq-dns-848cf88cfc-29djt\" (UID: \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\") " pod="openstack/dnsmasq-dns-848cf88cfc-29djt" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.191247 4899 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-config\") pod \"dnsmasq-dns-848cf88cfc-29djt\" (UID: \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\") " pod="openstack/dnsmasq-dns-848cf88cfc-29djt" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.191286 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/987dbede-0277-4b22-b643-36d8b379542f-config-data\") pod \"barbican-api-68b787dc9b-6xw72\" (UID: \"987dbede-0277-4b22-b643-36d8b379542f\") " pod="openstack/barbican-api-68b787dc9b-6xw72" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.191391 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76w8k\" (UniqueName: \"kubernetes.io/projected/987dbede-0277-4b22-b643-36d8b379542f-kube-api-access-76w8k\") pod \"barbican-api-68b787dc9b-6xw72\" (UID: \"987dbede-0277-4b22-b643-36d8b379542f\") " pod="openstack/barbican-api-68b787dc9b-6xw72" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.191467 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/987dbede-0277-4b22-b643-36d8b379542f-combined-ca-bundle\") pod \"barbican-api-68b787dc9b-6xw72\" (UID: \"987dbede-0277-4b22-b643-36d8b379542f\") " pod="openstack/barbican-api-68b787dc9b-6xw72" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.191487 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-dns-svc\") pod \"dnsmasq-dns-848cf88cfc-29djt\" (UID: \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\") " pod="openstack/dnsmasq-dns-848cf88cfc-29djt" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.191534 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-ovsdbserver-nb\") pod \"dnsmasq-dns-848cf88cfc-29djt\" (UID: \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\") " pod="openstack/dnsmasq-dns-848cf88cfc-29djt" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.191665 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/987dbede-0277-4b22-b643-36d8b379542f-config-data-custom\") pod \"barbican-api-68b787dc9b-6xw72\" (UID: \"987dbede-0277-4b22-b643-36d8b379542f\") " pod="openstack/barbican-api-68b787dc9b-6xw72" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.192140 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-ovsdbserver-sb\") pod \"dnsmasq-dns-848cf88cfc-29djt\" (UID: \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\") " pod="openstack/dnsmasq-dns-848cf88cfc-29djt" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.192311 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-dns-swift-storage-0\") pod \"dnsmasq-dns-848cf88cfc-29djt\" (UID: \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\") " pod="openstack/dnsmasq-dns-848cf88cfc-29djt" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.192458 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/987dbede-0277-4b22-b643-36d8b379542f-logs\") pod \"barbican-api-68b787dc9b-6xw72\" (UID: \"987dbede-0277-4b22-b643-36d8b379542f\") " pod="openstack/barbican-api-68b787dc9b-6xw72" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.193049 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-ovsdbserver-nb\") pod \"dnsmasq-dns-848cf88cfc-29djt\" (UID: \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\") " pod="openstack/dnsmasq-dns-848cf88cfc-29djt" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.193145 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-dns-svc\") pod \"dnsmasq-dns-848cf88cfc-29djt\" (UID: \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\") " pod="openstack/dnsmasq-dns-848cf88cfc-29djt" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.194344 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-config\") pod \"dnsmasq-dns-848cf88cfc-29djt\" (UID: \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\") " pod="openstack/dnsmasq-dns-848cf88cfc-29djt" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.197767 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/987dbede-0277-4b22-b643-36d8b379542f-combined-ca-bundle\") pod \"barbican-api-68b787dc9b-6xw72\" (UID: \"987dbede-0277-4b22-b643-36d8b379542f\") " pod="openstack/barbican-api-68b787dc9b-6xw72" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.199288 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/987dbede-0277-4b22-b643-36d8b379542f-config-data\") pod \"barbican-api-68b787dc9b-6xw72\" (UID: \"987dbede-0277-4b22-b643-36d8b379542f\") " pod="openstack/barbican-api-68b787dc9b-6xw72" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.203175 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/987dbede-0277-4b22-b643-36d8b379542f-config-data-custom\") pod \"barbican-api-68b787dc9b-6xw72\" (UID: \"987dbede-0277-4b22-b643-36d8b379542f\") " pod="openstack/barbican-api-68b787dc9b-6xw72" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.217066 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76w8k\" (UniqueName: \"kubernetes.io/projected/987dbede-0277-4b22-b643-36d8b379542f-kube-api-access-76w8k\") pod \"barbican-api-68b787dc9b-6xw72\" (UID: \"987dbede-0277-4b22-b643-36d8b379542f\") " pod="openstack/barbican-api-68b787dc9b-6xw72" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.217705 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwt8x\" (UniqueName: \"kubernetes.io/projected/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-kube-api-access-xwt8x\") pod \"dnsmasq-dns-848cf88cfc-29djt\" (UID: \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\") " pod="openstack/dnsmasq-dns-848cf88cfc-29djt" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.250835 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-5b77574dc-vm5lv" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.308753 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-59465fcb84-kkbzz" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.334728 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-29djt" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.381393 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-q9f5f" event={"ID":"5545cd7a-7849-48e5-91f3-6a3a8d51e665","Type":"ContainerStarted","Data":"468a832bcbae0a299dcc63dd7c633e686d058e1925d0a6128323007644557f3f"} Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.386400 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-68b787dc9b-6xw72" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.409472 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-q9f5f" podStartSLOduration=6.550720142 podStartE2EDuration="50.409454251s" podCreationTimestamp="2025-10-03 08:56:10 +0000 UTC" firstStartedPulling="2025-10-03 08:56:14.856255078 +0000 UTC m=+948.963740031" lastFinishedPulling="2025-10-03 08:56:58.714989187 +0000 UTC m=+992.822474140" observedRunningTime="2025-10-03 08:57:00.409171531 +0000 UTC m=+994.516656484" watchObservedRunningTime="2025-10-03 08:57:00.409454251 +0000 UTC m=+994.516939214" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.576886 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a332aa78-c64e-46f2-b2a0-6cf8be20fe4c" path="/var/lib/kubelet/pods/a332aa78-c64e-46f2-b2a0-6cf8be20fe4c/volumes" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.577877 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-568fd9848b-bw6ch"] Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.621146 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-688fdbdf8c-rnx7k"] Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.888017 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5bcb4b4796-x4jmr" podUID="2eba2a41-6a16-4f77-a699-157fb6fa7b3f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.147:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.147:8443: connect: connection refused" Oct 03 08:57:00 crc kubenswrapper[4899]: I1003 08:57:00.912837 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5b77574dc-vm5lv"] Oct 03 08:57:01 crc kubenswrapper[4899]: I1003 08:57:01.042463 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-29djt"] Oct 03 08:57:01 crc kubenswrapper[4899]: W1003 08:57:01.056522 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode59f15bb_5dfd_40ab_aebb_bd9ea515031b.slice/crio-e5f156276f357dd26d70d13e21944d6e903d210297f826720affe43b900a08c0 WatchSource:0}: Error finding container e5f156276f357dd26d70d13e21944d6e903d210297f826720affe43b900a08c0: Status 404 returned error can't find the container with id e5f156276f357dd26d70d13e21944d6e903d210297f826720affe43b900a08c0 Oct 03 08:57:01 crc kubenswrapper[4899]: I1003 08:57:01.061410 4899 prober.go:107] "Probe failed" probeType="Startup" 
pod="openstack/horizon-7f5ccd89b4-5dfm2" podUID="bf908711-a33e-40be-b5a0-c82254721d41" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.148:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.148:8443: connect: connection refused" Oct 03 08:57:01 crc kubenswrapper[4899]: I1003 08:57:01.171711 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-59465fcb84-kkbzz"] Oct 03 08:57:01 crc kubenswrapper[4899]: I1003 08:57:01.179300 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-68b787dc9b-6xw72"] Oct 03 08:57:01 crc kubenswrapper[4899]: W1003 08:57:01.208046 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0bd52669_a824_4b16_a840_2feed9e46a6c.slice/crio-559c416de511170d2dee70a8f10c1e6aab49c38e467512a901040f29320e721b WatchSource:0}: Error finding container 559c416de511170d2dee70a8f10c1e6aab49c38e467512a901040f29320e721b: Status 404 returned error can't find the container with id 559c416de511170d2dee70a8f10c1e6aab49c38e467512a901040f29320e721b Oct 03 08:57:01 crc kubenswrapper[4899]: I1003 08:57:01.391940 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-29djt" event={"ID":"e59f15bb-5dfd-40ab-aebb-bd9ea515031b","Type":"ContainerStarted","Data":"e5f156276f357dd26d70d13e21944d6e903d210297f826720affe43b900a08c0"} Oct 03 08:57:01 crc kubenswrapper[4899]: I1003 08:57:01.393394 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-59465fcb84-kkbzz" event={"ID":"0bd52669-a824-4b16-a840-2feed9e46a6c","Type":"ContainerStarted","Data":"559c416de511170d2dee70a8f10c1e6aab49c38e467512a901040f29320e721b"} Oct 03 08:57:01 crc kubenswrapper[4899]: I1003 08:57:01.406031 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-688fdbdf8c-rnx7k" event={"ID":"36a71770-b047-4d86-96c0-2888f9258599","Type":"ContainerStarted","Data":"28e879c503e6e5267c3210300a66c0229306d20c887a2321593f57fa5a61f7f0"} Oct 03 08:57:01 crc kubenswrapper[4899]: I1003 08:57:01.406075 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-688fdbdf8c-rnx7k" event={"ID":"36a71770-b047-4d86-96c0-2888f9258599","Type":"ContainerStarted","Data":"39e3118e2a3fc9cc181ae6be4ebe46fcf8825e2b4bdeaab92230c74b0ae02262"} Oct 03 08:57:01 crc kubenswrapper[4899]: I1003 08:57:01.406611 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-688fdbdf8c-rnx7k" Oct 03 08:57:01 crc kubenswrapper[4899]: I1003 08:57:01.415496 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5b77574dc-vm5lv" event={"ID":"69c8959c-64e4-43a6-9b2c-133dd960fc67","Type":"ContainerStarted","Data":"e814b8fc0329e1b14bef69ebde2adf7b507f360bffb4ac308e8726c3a74af6a5"} Oct 03 08:57:01 crc kubenswrapper[4899]: I1003 08:57:01.416962 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68b787dc9b-6xw72" event={"ID":"987dbede-0277-4b22-b643-36d8b379542f","Type":"ContainerStarted","Data":"f81981cbb19b9eff70a81a591276d1a179aed8c48954f9385e02763611562a52"} Oct 03 08:57:01 crc kubenswrapper[4899]: I1003 08:57:01.418445 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-568fd9848b-bw6ch" event={"ID":"73b0bcea-efbe-4c62-b97c-031ea8fee918","Type":"ContainerStarted","Data":"12cc9bc2a468a21d5b36fb241d14951b28cc154d92f8bf0cf0ce4d97cc860f70"} Oct 03 
08:57:01 crc kubenswrapper[4899]: I1003 08:57:01.418474 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-568fd9848b-bw6ch" event={"ID":"73b0bcea-efbe-4c62-b97c-031ea8fee918","Type":"ContainerStarted","Data":"7ae036d5197e38c775cffea40bcc3f9004f5ffffd3d68e96ae5fac57695482d8"} Oct 03 08:57:01 crc kubenswrapper[4899]: I1003 08:57:01.442420 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-688fdbdf8c-rnx7k" podStartSLOduration=2.442392673 podStartE2EDuration="2.442392673s" podCreationTimestamp="2025-10-03 08:56:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:57:01.436010043 +0000 UTC m=+995.543495006" watchObservedRunningTime="2025-10-03 08:57:01.442392673 +0000 UTC m=+995.549877626" Oct 03 08:57:02 crc kubenswrapper[4899]: I1003 08:57:02.463698 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-568fd9848b-bw6ch" event={"ID":"73b0bcea-efbe-4c62-b97c-031ea8fee918","Type":"ContainerStarted","Data":"4171100e40e3c42e6a93851347ef998cd2cf843a83ec8e6720da210f0f1b04e8"} Oct 03 08:57:02 crc kubenswrapper[4899]: I1003 08:57:02.464721 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-568fd9848b-bw6ch" Oct 03 08:57:02 crc kubenswrapper[4899]: I1003 08:57:02.464747 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-568fd9848b-bw6ch" Oct 03 08:57:02 crc kubenswrapper[4899]: I1003 08:57:02.478012 4899 generic.go:334] "Generic (PLEG): container finished" podID="e59f15bb-5dfd-40ab-aebb-bd9ea515031b" containerID="2c836952e96096387b0fe31c66803241fccc236de567814f5b5b065874de933f" exitCode=0 Oct 03 08:57:02 crc kubenswrapper[4899]: I1003 08:57:02.478124 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-29djt" event={"ID":"e59f15bb-5dfd-40ab-aebb-bd9ea515031b","Type":"ContainerDied","Data":"2c836952e96096387b0fe31c66803241fccc236de567814f5b5b065874de933f"} Oct 03 08:57:02 crc kubenswrapper[4899]: I1003 08:57:02.493654 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-568fd9848b-bw6ch" podStartSLOduration=3.493634923 podStartE2EDuration="3.493634923s" podCreationTimestamp="2025-10-03 08:56:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:57:02.482293636 +0000 UTC m=+996.589778609" watchObservedRunningTime="2025-10-03 08:57:02.493634923 +0000 UTC m=+996.601119866" Oct 03 08:57:02 crc kubenswrapper[4899]: I1003 08:57:02.498201 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68b787dc9b-6xw72" event={"ID":"987dbede-0277-4b22-b643-36d8b379542f","Type":"ContainerStarted","Data":"70d85b4c0dff5a81bb678bf9e95a38d95d816ea7ea6093e2f6e67c1780145eb7"} Oct 03 08:57:02 crc kubenswrapper[4899]: I1003 08:57:02.498249 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-68b787dc9b-6xw72" Oct 03 08:57:02 crc kubenswrapper[4899]: I1003 08:57:02.498264 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68b787dc9b-6xw72" event={"ID":"987dbede-0277-4b22-b643-36d8b379542f","Type":"ContainerStarted","Data":"c2a2658ed205303a0b39c25e056177f5e499bc1a5ae456a53e76c8a7a00efc9b"} Oct 03 08:57:02 crc kubenswrapper[4899]: I1003 08:57:02.498277 4899 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-68b787dc9b-6xw72" Oct 03 08:57:02 crc kubenswrapper[4899]: I1003 08:57:02.525675 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-68b787dc9b-6xw72" podStartSLOduration=3.525657314 podStartE2EDuration="3.525657314s" podCreationTimestamp="2025-10-03 08:56:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:57:02.519759078 +0000 UTC m=+996.627244041" watchObservedRunningTime="2025-10-03 08:57:02.525657314 +0000 UTC m=+996.633142267" Oct 03 08:57:02 crc kubenswrapper[4899]: I1003 08:57:02.529695 4899 scope.go:117] "RemoveContainer" containerID="77870a2057bf43277803181b734abba5725d1f143ab9c8bf28274f3988793769" Oct 03 08:57:03 crc kubenswrapper[4899]: I1003 08:57:03.745429 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-56dc79cc94-hbxqp"] Oct 03 08:57:03 crc kubenswrapper[4899]: I1003 08:57:03.747203 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-56dc79cc94-hbxqp" Oct 03 08:57:03 crc kubenswrapper[4899]: I1003 08:57:03.749274 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Oct 03 08:57:03 crc kubenswrapper[4899]: I1003 08:57:03.749719 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Oct 03 08:57:03 crc kubenswrapper[4899]: I1003 08:57:03.769316 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-56dc79cc94-hbxqp"] Oct 03 08:57:03 crc kubenswrapper[4899]: I1003 08:57:03.901141 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/85b9210c-c4ec-4020-9137-f4b4fdf9dc51-internal-tls-certs\") pod \"barbican-api-56dc79cc94-hbxqp\" (UID: \"85b9210c-c4ec-4020-9137-f4b4fdf9dc51\") " pod="openstack/barbican-api-56dc79cc94-hbxqp" Oct 03 08:57:03 crc kubenswrapper[4899]: I1003 08:57:03.901273 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85b9210c-c4ec-4020-9137-f4b4fdf9dc51-logs\") pod \"barbican-api-56dc79cc94-hbxqp\" (UID: \"85b9210c-c4ec-4020-9137-f4b4fdf9dc51\") " pod="openstack/barbican-api-56dc79cc94-hbxqp" Oct 03 08:57:03 crc kubenswrapper[4899]: I1003 08:57:03.901310 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85b9210c-c4ec-4020-9137-f4b4fdf9dc51-config-data\") pod \"barbican-api-56dc79cc94-hbxqp\" (UID: \"85b9210c-c4ec-4020-9137-f4b4fdf9dc51\") " pod="openstack/barbican-api-56dc79cc94-hbxqp" Oct 03 08:57:03 crc kubenswrapper[4899]: I1003 08:57:03.901353 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwx9t\" (UniqueName: \"kubernetes.io/projected/85b9210c-c4ec-4020-9137-f4b4fdf9dc51-kube-api-access-bwx9t\") pod \"barbican-api-56dc79cc94-hbxqp\" (UID: \"85b9210c-c4ec-4020-9137-f4b4fdf9dc51\") " pod="openstack/barbican-api-56dc79cc94-hbxqp" Oct 03 08:57:03 crc kubenswrapper[4899]: I1003 08:57:03.901395 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/85b9210c-c4ec-4020-9137-f4b4fdf9dc51-public-tls-certs\") pod \"barbican-api-56dc79cc94-hbxqp\" (UID: \"85b9210c-c4ec-4020-9137-f4b4fdf9dc51\") " pod="openstack/barbican-api-56dc79cc94-hbxqp" Oct 03 08:57:03 crc kubenswrapper[4899]: I1003 08:57:03.901446 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/85b9210c-c4ec-4020-9137-f4b4fdf9dc51-config-data-custom\") pod \"barbican-api-56dc79cc94-hbxqp\" (UID: \"85b9210c-c4ec-4020-9137-f4b4fdf9dc51\") " pod="openstack/barbican-api-56dc79cc94-hbxqp" Oct 03 08:57:03 crc kubenswrapper[4899]: I1003 08:57:03.901516 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85b9210c-c4ec-4020-9137-f4b4fdf9dc51-combined-ca-bundle\") pod \"barbican-api-56dc79cc94-hbxqp\" (UID: \"85b9210c-c4ec-4020-9137-f4b4fdf9dc51\") " pod="openstack/barbican-api-56dc79cc94-hbxqp" Oct 03 08:57:04 crc kubenswrapper[4899]: I1003 08:57:04.003710 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/85b9210c-c4ec-4020-9137-f4b4fdf9dc51-internal-tls-certs\") pod \"barbican-api-56dc79cc94-hbxqp\" (UID: \"85b9210c-c4ec-4020-9137-f4b4fdf9dc51\") " pod="openstack/barbican-api-56dc79cc94-hbxqp" Oct 03 08:57:04 crc kubenswrapper[4899]: I1003 08:57:04.003821 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85b9210c-c4ec-4020-9137-f4b4fdf9dc51-logs\") pod \"barbican-api-56dc79cc94-hbxqp\" (UID: \"85b9210c-c4ec-4020-9137-f4b4fdf9dc51\") " pod="openstack/barbican-api-56dc79cc94-hbxqp" Oct 03 08:57:04 crc kubenswrapper[4899]: I1003 08:57:04.003844 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85b9210c-c4ec-4020-9137-f4b4fdf9dc51-config-data\") pod \"barbican-api-56dc79cc94-hbxqp\" (UID: \"85b9210c-c4ec-4020-9137-f4b4fdf9dc51\") " pod="openstack/barbican-api-56dc79cc94-hbxqp" Oct 03 08:57:04 crc kubenswrapper[4899]: I1003 08:57:04.003879 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwx9t\" (UniqueName: \"kubernetes.io/projected/85b9210c-c4ec-4020-9137-f4b4fdf9dc51-kube-api-access-bwx9t\") pod \"barbican-api-56dc79cc94-hbxqp\" (UID: \"85b9210c-c4ec-4020-9137-f4b4fdf9dc51\") " pod="openstack/barbican-api-56dc79cc94-hbxqp" Oct 03 08:57:04 crc kubenswrapper[4899]: I1003 08:57:04.003930 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/85b9210c-c4ec-4020-9137-f4b4fdf9dc51-public-tls-certs\") pod \"barbican-api-56dc79cc94-hbxqp\" (UID: \"85b9210c-c4ec-4020-9137-f4b4fdf9dc51\") " pod="openstack/barbican-api-56dc79cc94-hbxqp" Oct 03 08:57:04 crc kubenswrapper[4899]: I1003 08:57:04.004100 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/85b9210c-c4ec-4020-9137-f4b4fdf9dc51-config-data-custom\") pod \"barbican-api-56dc79cc94-hbxqp\" (UID: \"85b9210c-c4ec-4020-9137-f4b4fdf9dc51\") " pod="openstack/barbican-api-56dc79cc94-hbxqp" Oct 03 08:57:04 crc kubenswrapper[4899]: I1003 08:57:04.004154 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/85b9210c-c4ec-4020-9137-f4b4fdf9dc51-combined-ca-bundle\") pod \"barbican-api-56dc79cc94-hbxqp\" (UID: \"85b9210c-c4ec-4020-9137-f4b4fdf9dc51\") " pod="openstack/barbican-api-56dc79cc94-hbxqp" Oct 03 08:57:04 crc kubenswrapper[4899]: I1003 08:57:04.004573 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85b9210c-c4ec-4020-9137-f4b4fdf9dc51-logs\") pod \"barbican-api-56dc79cc94-hbxqp\" (UID: \"85b9210c-c4ec-4020-9137-f4b4fdf9dc51\") " pod="openstack/barbican-api-56dc79cc94-hbxqp" Oct 03 08:57:04 crc kubenswrapper[4899]: I1003 08:57:04.010057 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/85b9210c-c4ec-4020-9137-f4b4fdf9dc51-public-tls-certs\") pod \"barbican-api-56dc79cc94-hbxqp\" (UID: \"85b9210c-c4ec-4020-9137-f4b4fdf9dc51\") " pod="openstack/barbican-api-56dc79cc94-hbxqp" Oct 03 08:57:04 crc kubenswrapper[4899]: I1003 08:57:04.014630 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85b9210c-c4ec-4020-9137-f4b4fdf9dc51-config-data\") pod \"barbican-api-56dc79cc94-hbxqp\" (UID: \"85b9210c-c4ec-4020-9137-f4b4fdf9dc51\") " pod="openstack/barbican-api-56dc79cc94-hbxqp" Oct 03 08:57:04 crc kubenswrapper[4899]: I1003 08:57:04.022433 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/85b9210c-c4ec-4020-9137-f4b4fdf9dc51-internal-tls-certs\") pod \"barbican-api-56dc79cc94-hbxqp\" (UID: \"85b9210c-c4ec-4020-9137-f4b4fdf9dc51\") " pod="openstack/barbican-api-56dc79cc94-hbxqp" Oct 03 08:57:04 crc kubenswrapper[4899]: I1003 08:57:04.041057 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85b9210c-c4ec-4020-9137-f4b4fdf9dc51-combined-ca-bundle\") pod \"barbican-api-56dc79cc94-hbxqp\" (UID: \"85b9210c-c4ec-4020-9137-f4b4fdf9dc51\") " pod="openstack/barbican-api-56dc79cc94-hbxqp" Oct 03 08:57:04 crc kubenswrapper[4899]: I1003 08:57:04.046641 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwx9t\" (UniqueName: \"kubernetes.io/projected/85b9210c-c4ec-4020-9137-f4b4fdf9dc51-kube-api-access-bwx9t\") pod \"barbican-api-56dc79cc94-hbxqp\" (UID: \"85b9210c-c4ec-4020-9137-f4b4fdf9dc51\") " pod="openstack/barbican-api-56dc79cc94-hbxqp" Oct 03 08:57:04 crc kubenswrapper[4899]: I1003 08:57:04.046647 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/85b9210c-c4ec-4020-9137-f4b4fdf9dc51-config-data-custom\") pod \"barbican-api-56dc79cc94-hbxqp\" (UID: \"85b9210c-c4ec-4020-9137-f4b4fdf9dc51\") " pod="openstack/barbican-api-56dc79cc94-hbxqp" Oct 03 08:57:04 crc kubenswrapper[4899]: I1003 08:57:04.075953 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-56dc79cc94-hbxqp" Oct 03 08:57:04 crc kubenswrapper[4899]: I1003 08:57:04.515238 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-29djt" event={"ID":"e59f15bb-5dfd-40ab-aebb-bd9ea515031b","Type":"ContainerStarted","Data":"1e138718686cbc7548d43361a999e66c9fe663434aa67c8d601653483ac7e7c0"} Oct 03 08:57:04 crc kubenswrapper[4899]: I1003 08:57:04.515787 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-848cf88cfc-29djt" Oct 03 08:57:04 crc kubenswrapper[4899]: I1003 08:57:04.530378 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5dc9cdc98b-cgxhj_a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73/neutron-httpd/1.log" Oct 03 08:57:04 crc kubenswrapper[4899]: I1003 08:57:04.542285 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-848cf88cfc-29djt" podStartSLOduration=5.542263715 podStartE2EDuration="5.542263715s" podCreationTimestamp="2025-10-03 08:56:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:57:04.538345462 +0000 UTC m=+998.645830435" watchObservedRunningTime="2025-10-03 08:57:04.542263715 +0000 UTC m=+998.649748668" Oct 03 08:57:04 crc kubenswrapper[4899]: I1003 08:57:04.554842 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5dc9cdc98b-cgxhj" event={"ID":"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73","Type":"ContainerStarted","Data":"a23ac8cf8d4767c51a114f69d3bb77e24400da4dd9413c9b91cb078b4a66625f"} Oct 03 08:57:04 crc kubenswrapper[4899]: I1003 08:57:04.555067 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-59465fcb84-kkbzz" event={"ID":"0bd52669-a824-4b16-a840-2feed9e46a6c","Type":"ContainerStarted","Data":"c21a9d8d09fce8237d0cfae4a6ec4d2504c0dab7917c3abc6cf29d49c0dc073f"} Oct 03 08:57:04 crc kubenswrapper[4899]: I1003 08:57:04.555272 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5dc9cdc98b-cgxhj" Oct 03 08:57:04 crc kubenswrapper[4899]: I1003 08:57:04.562083 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5b77574dc-vm5lv" event={"ID":"69c8959c-64e4-43a6-9b2c-133dd960fc67","Type":"ContainerStarted","Data":"929b70f3dabe21f8b1dc3e80d342a12e43ded6947f8dbff720eb01d3ebe0bc60"} Oct 03 08:57:04 crc kubenswrapper[4899]: I1003 08:57:04.592505 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-56dc79cc94-hbxqp"] Oct 03 08:57:04 crc kubenswrapper[4899]: W1003 08:57:04.602102 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85b9210c_c4ec_4020_9137_f4b4fdf9dc51.slice/crio-c0c8488100f76569422406c7ca4a42fbf982f34b8a8b24af5460eb6a9dc98dc2 WatchSource:0}: Error finding container c0c8488100f76569422406c7ca4a42fbf982f34b8a8b24af5460eb6a9dc98dc2: Status 404 returned error can't find the container with id c0c8488100f76569422406c7ca4a42fbf982f34b8a8b24af5460eb6a9dc98dc2 Oct 03 08:57:05 crc kubenswrapper[4899]: I1003 08:57:05.571299 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5dc9cdc98b-cgxhj_a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73/neutron-httpd/2.log" Oct 03 08:57:05 crc kubenswrapper[4899]: I1003 08:57:05.572074 4899 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_neutron-5dc9cdc98b-cgxhj_a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73/neutron-httpd/1.log" Oct 03 08:57:05 crc kubenswrapper[4899]: I1003 08:57:05.572400 4899 generic.go:334] "Generic (PLEG): container finished" podID="a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" containerID="a23ac8cf8d4767c51a114f69d3bb77e24400da4dd9413c9b91cb078b4a66625f" exitCode=1 Oct 03 08:57:05 crc kubenswrapper[4899]: I1003 08:57:05.572446 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5dc9cdc98b-cgxhj" event={"ID":"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73","Type":"ContainerDied","Data":"a23ac8cf8d4767c51a114f69d3bb77e24400da4dd9413c9b91cb078b4a66625f"} Oct 03 08:57:05 crc kubenswrapper[4899]: I1003 08:57:05.572514 4899 scope.go:117] "RemoveContainer" containerID="77870a2057bf43277803181b734abba5725d1f143ab9c8bf28274f3988793769" Oct 03 08:57:05 crc kubenswrapper[4899]: I1003 08:57:05.573373 4899 scope.go:117] "RemoveContainer" containerID="a23ac8cf8d4767c51a114f69d3bb77e24400da4dd9413c9b91cb078b4a66625f" Oct 03 08:57:05 crc kubenswrapper[4899]: E1003 08:57:05.573640 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"neutron-httpd\" with CrashLoopBackOff: \"back-off 20s restarting failed container=neutron-httpd pod=neutron-5dc9cdc98b-cgxhj_openstack(a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73)\"" pod="openstack/neutron-5dc9cdc98b-cgxhj" podUID="a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" Oct 03 08:57:05 crc kubenswrapper[4899]: I1003 08:57:05.575392 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-59465fcb84-kkbzz" event={"ID":"0bd52669-a824-4b16-a840-2feed9e46a6c","Type":"ContainerStarted","Data":"41598c5863c6fdf64be68e8e5ea7109495e94b4515f0748d44c31a6f9c8b7d17"} Oct 03 08:57:05 crc kubenswrapper[4899]: I1003 08:57:05.577880 4899 generic.go:334] "Generic (PLEG): container finished" podID="5545cd7a-7849-48e5-91f3-6a3a8d51e665" containerID="468a832bcbae0a299dcc63dd7c633e686d058e1925d0a6128323007644557f3f" exitCode=0 Oct 03 08:57:05 crc kubenswrapper[4899]: I1003 08:57:05.577956 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-q9f5f" event={"ID":"5545cd7a-7849-48e5-91f3-6a3a8d51e665","Type":"ContainerDied","Data":"468a832bcbae0a299dcc63dd7c633e686d058e1925d0a6128323007644557f3f"} Oct 03 08:57:05 crc kubenswrapper[4899]: I1003 08:57:05.580760 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5b77574dc-vm5lv" event={"ID":"69c8959c-64e4-43a6-9b2c-133dd960fc67","Type":"ContainerStarted","Data":"6837a648549278274628294e15bd3fdafae033b356de217ff6e586f7df5c4525"} Oct 03 08:57:05 crc kubenswrapper[4899]: I1003 08:57:05.584140 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-56dc79cc94-hbxqp" event={"ID":"85b9210c-c4ec-4020-9137-f4b4fdf9dc51","Type":"ContainerStarted","Data":"13754c1dd46020fa802f284a72754b3976e661378d6090ca7565501d563e8561"} Oct 03 08:57:05 crc kubenswrapper[4899]: I1003 08:57:05.584184 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-56dc79cc94-hbxqp" event={"ID":"85b9210c-c4ec-4020-9137-f4b4fdf9dc51","Type":"ContainerStarted","Data":"3705c1127cabc14b2d0fca1a9f0cf929ac91728caf9275dac1003cf87079cfe2"} Oct 03 08:57:05 crc kubenswrapper[4899]: I1003 08:57:05.584216 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-56dc79cc94-hbxqp" 
event={"ID":"85b9210c-c4ec-4020-9137-f4b4fdf9dc51","Type":"ContainerStarted","Data":"c0c8488100f76569422406c7ca4a42fbf982f34b8a8b24af5460eb6a9dc98dc2"} Oct 03 08:57:05 crc kubenswrapper[4899]: I1003 08:57:05.584256 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-56dc79cc94-hbxqp" Oct 03 08:57:05 crc kubenswrapper[4899]: I1003 08:57:05.584289 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-56dc79cc94-hbxqp" Oct 03 08:57:05 crc kubenswrapper[4899]: I1003 08:57:05.634883 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-5b77574dc-vm5lv" podStartSLOduration=3.641460305 podStartE2EDuration="6.634864891s" podCreationTimestamp="2025-10-03 08:56:59 +0000 UTC" firstStartedPulling="2025-10-03 08:57:00.902987633 +0000 UTC m=+995.010472586" lastFinishedPulling="2025-10-03 08:57:03.896392219 +0000 UTC m=+998.003877172" observedRunningTime="2025-10-03 08:57:05.633096874 +0000 UTC m=+999.740581847" watchObservedRunningTime="2025-10-03 08:57:05.634864891 +0000 UTC m=+999.742349854" Oct 03 08:57:05 crc kubenswrapper[4899]: I1003 08:57:05.725351 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-59465fcb84-kkbzz" podStartSLOduration=4.043581164 podStartE2EDuration="6.725324002s" podCreationTimestamp="2025-10-03 08:56:59 +0000 UTC" firstStartedPulling="2025-10-03 08:57:01.228161057 +0000 UTC m=+995.335646010" lastFinishedPulling="2025-10-03 08:57:03.909903885 +0000 UTC m=+998.017388848" observedRunningTime="2025-10-03 08:57:05.707340376 +0000 UTC m=+999.814825339" watchObservedRunningTime="2025-10-03 08:57:05.725324002 +0000 UTC m=+999.832808955" Oct 03 08:57:05 crc kubenswrapper[4899]: I1003 08:57:05.758936 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-56dc79cc94-hbxqp" podStartSLOduration=2.758918942 podStartE2EDuration="2.758918942s" podCreationTimestamp="2025-10-03 08:57:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:57:05.756066072 +0000 UTC m=+999.863551025" watchObservedRunningTime="2025-10-03 08:57:05.758918942 +0000 UTC m=+999.866403895" Oct 03 08:57:06 crc kubenswrapper[4899]: I1003 08:57:06.603485 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5dc9cdc98b-cgxhj_a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73/neutron-httpd/2.log" Oct 03 08:57:06 crc kubenswrapper[4899]: I1003 08:57:06.605577 4899 scope.go:117] "RemoveContainer" containerID="a23ac8cf8d4767c51a114f69d3bb77e24400da4dd9413c9b91cb078b4a66625f" Oct 03 08:57:06 crc kubenswrapper[4899]: E1003 08:57:06.606110 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"neutron-httpd\" with CrashLoopBackOff: \"back-off 20s restarting failed container=neutron-httpd pod=neutron-5dc9cdc98b-cgxhj_openstack(a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73)\"" pod="openstack/neutron-5dc9cdc98b-cgxhj" podUID="a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" Oct 03 08:57:09 crc kubenswrapper[4899]: I1003 08:57:09.569067 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-q9f5f" Oct 03 08:57:09 crc kubenswrapper[4899]: I1003 08:57:09.649588 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-q9f5f" event={"ID":"5545cd7a-7849-48e5-91f3-6a3a8d51e665","Type":"ContainerDied","Data":"2963876c2a63615a3db418901840160a2bc9aad69e0acefd1ef7eacb672982bb"} Oct 03 08:57:09 crc kubenswrapper[4899]: I1003 08:57:09.649861 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2963876c2a63615a3db418901840160a2bc9aad69e0acefd1ef7eacb672982bb" Oct 03 08:57:09 crc kubenswrapper[4899]: I1003 08:57:09.649928 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-q9f5f" Oct 03 08:57:09 crc kubenswrapper[4899]: I1003 08:57:09.735016 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5545cd7a-7849-48e5-91f3-6a3a8d51e665-etc-machine-id\") pod \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\" (UID: \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\") " Oct 03 08:57:09 crc kubenswrapper[4899]: I1003 08:57:09.735105 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5545cd7a-7849-48e5-91f3-6a3a8d51e665-config-data\") pod \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\" (UID: \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\") " Oct 03 08:57:09 crc kubenswrapper[4899]: I1003 08:57:09.735134 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5545cd7a-7849-48e5-91f3-6a3a8d51e665-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "5545cd7a-7849-48e5-91f3-6a3a8d51e665" (UID: "5545cd7a-7849-48e5-91f3-6a3a8d51e665"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 08:57:09 crc kubenswrapper[4899]: I1003 08:57:09.735192 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5545cd7a-7849-48e5-91f3-6a3a8d51e665-combined-ca-bundle\") pod \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\" (UID: \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\") " Oct 03 08:57:09 crc kubenswrapper[4899]: I1003 08:57:09.735333 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5545cd7a-7849-48e5-91f3-6a3a8d51e665-scripts\") pod \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\" (UID: \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\") " Oct 03 08:57:09 crc kubenswrapper[4899]: I1003 08:57:09.735367 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5545cd7a-7849-48e5-91f3-6a3a8d51e665-db-sync-config-data\") pod \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\" (UID: \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\") " Oct 03 08:57:09 crc kubenswrapper[4899]: I1003 08:57:09.735430 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cf5vp\" (UniqueName: \"kubernetes.io/projected/5545cd7a-7849-48e5-91f3-6a3a8d51e665-kube-api-access-cf5vp\") pod \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\" (UID: \"5545cd7a-7849-48e5-91f3-6a3a8d51e665\") " Oct 03 08:57:09 crc kubenswrapper[4899]: I1003 08:57:09.735800 4899 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5545cd7a-7849-48e5-91f3-6a3a8d51e665-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:09 crc kubenswrapper[4899]: I1003 08:57:09.745757 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5545cd7a-7849-48e5-91f3-6a3a8d51e665-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "5545cd7a-7849-48e5-91f3-6a3a8d51e665" (UID: "5545cd7a-7849-48e5-91f3-6a3a8d51e665"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:09 crc kubenswrapper[4899]: I1003 08:57:09.748293 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5545cd7a-7849-48e5-91f3-6a3a8d51e665-kube-api-access-cf5vp" (OuterVolumeSpecName: "kube-api-access-cf5vp") pod "5545cd7a-7849-48e5-91f3-6a3a8d51e665" (UID: "5545cd7a-7849-48e5-91f3-6a3a8d51e665"). InnerVolumeSpecName "kube-api-access-cf5vp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:57:09 crc kubenswrapper[4899]: I1003 08:57:09.765075 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5545cd7a-7849-48e5-91f3-6a3a8d51e665-scripts" (OuterVolumeSpecName: "scripts") pod "5545cd7a-7849-48e5-91f3-6a3a8d51e665" (UID: "5545cd7a-7849-48e5-91f3-6a3a8d51e665"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:09 crc kubenswrapper[4899]: I1003 08:57:09.770114 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5545cd7a-7849-48e5-91f3-6a3a8d51e665-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5545cd7a-7849-48e5-91f3-6a3a8d51e665" (UID: "5545cd7a-7849-48e5-91f3-6a3a8d51e665"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:09 crc kubenswrapper[4899]: I1003 08:57:09.818069 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5545cd7a-7849-48e5-91f3-6a3a8d51e665-config-data" (OuterVolumeSpecName: "config-data") pod "5545cd7a-7849-48e5-91f3-6a3a8d51e665" (UID: "5545cd7a-7849-48e5-91f3-6a3a8d51e665"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:09 crc kubenswrapper[4899]: I1003 08:57:09.840544 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5545cd7a-7849-48e5-91f3-6a3a8d51e665-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:09 crc kubenswrapper[4899]: I1003 08:57:09.840589 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5545cd7a-7849-48e5-91f3-6a3a8d51e665-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:09 crc kubenswrapper[4899]: I1003 08:57:09.840625 4899 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5545cd7a-7849-48e5-91f3-6a3a8d51e665-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:09 crc kubenswrapper[4899]: I1003 08:57:09.840636 4899 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5545cd7a-7849-48e5-91f3-6a3a8d51e665-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:09 crc kubenswrapper[4899]: I1003 08:57:09.840648 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cf5vp\" (UniqueName: \"kubernetes.io/projected/5545cd7a-7849-48e5-91f3-6a3a8d51e665-kube-api-access-cf5vp\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:10 crc kubenswrapper[4899]: I1003 08:57:10.336199 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-848cf88cfc-29djt" Oct 03 08:57:10 crc kubenswrapper[4899]: I1003 08:57:10.398107 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-c8f7n"] Oct 03 08:57:10 crc kubenswrapper[4899]: I1003 08:57:10.398336 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" podUID="a50d8934-3720-4f4e-a702-bf8b43090f52" containerName="dnsmasq-dns" containerID="cri-o://d86db3b904decf04300f016bf56997d71c802a962bc948fd94ad20deebf0cf19" gracePeriod=10 Oct 03 08:57:10 crc kubenswrapper[4899]: I1003 08:57:10.661519 4899 generic.go:334] "Generic (PLEG): container finished" podID="a50d8934-3720-4f4e-a702-bf8b43090f52" containerID="d86db3b904decf04300f016bf56997d71c802a962bc948fd94ad20deebf0cf19" exitCode=0 Oct 03 08:57:10 crc kubenswrapper[4899]: I1003 08:57:10.661607 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" event={"ID":"a50d8934-3720-4f4e-a702-bf8b43090f52","Type":"ContainerDied","Data":"d86db3b904decf04300f016bf56997d71c802a962bc948fd94ad20deebf0cf19"} Oct 03 08:57:10 crc kubenswrapper[4899]: I1003 08:57:10.925205 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5bcb4b4796-x4jmr" podUID="2eba2a41-6a16-4f77-a699-157fb6fa7b3f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.147:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.147:8443: connect: connection refused" Oct 03 08:57:10 crc kubenswrapper[4899]: I1003 08:57:10.934398 4899 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 08:57:10 crc kubenswrapper[4899]: E1003 08:57:10.938513 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5545cd7a-7849-48e5-91f3-6a3a8d51e665" containerName="cinder-db-sync" Oct 03 08:57:10 crc kubenswrapper[4899]: I1003 08:57:10.944136 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="5545cd7a-7849-48e5-91f3-6a3a8d51e665" containerName="cinder-db-sync" Oct 03 08:57:10 crc kubenswrapper[4899]: I1003 08:57:10.971513 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="5545cd7a-7849-48e5-91f3-6a3a8d51e665" containerName="cinder-db-sync" Oct 03 08:57:10 crc kubenswrapper[4899]: I1003 08:57:10.977085 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 08:57:10 crc kubenswrapper[4899]: I1003 08:57:10.978020 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 08:57:10 crc kubenswrapper[4899]: I1003 08:57:10.991565 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 03 08:57:10 crc kubenswrapper[4899]: I1003 08:57:10.991873 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 03 08:57:10 crc kubenswrapper[4899]: I1003 08:57:10.992083 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.003585 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-r7w6n" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.026779 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-nxxw4"] Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.032352 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.061114 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7f5ccd89b4-5dfm2" podUID="bf908711-a33e-40be-b5a0-c82254721d41" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.148:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.148:8443: connect: connection refused" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.064830 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-nxxw4"] Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.169120 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-config\") pod \"dnsmasq-dns-6578955fd5-nxxw4\" (UID: \"72d39196-9303-41a7-aa15-6eb7078f3b25\") " pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.169182 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-config-data\") pod \"cinder-scheduler-0\" (UID: \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.169234 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.169256 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.169316 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-scripts\") pod \"cinder-scheduler-0\" (UID: \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.169345 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpqml\" (UniqueName: \"kubernetes.io/projected/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-kube-api-access-jpqml\") pod \"cinder-scheduler-0\" (UID: \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.169453 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxn4r\" (UniqueName: \"kubernetes.io/projected/72d39196-9303-41a7-aa15-6eb7078f3b25-kube-api-access-dxn4r\") pod \"dnsmasq-dns-6578955fd5-nxxw4\" (UID: \"72d39196-9303-41a7-aa15-6eb7078f3b25\") " pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.170284 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" 
(UniqueName: \"kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-nxxw4\" (UID: \"72d39196-9303-41a7-aa15-6eb7078f3b25\") " pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.170410 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-nxxw4\" (UID: \"72d39196-9303-41a7-aa15-6eb7078f3b25\") " pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.170458 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-nxxw4\" (UID: \"72d39196-9303-41a7-aa15-6eb7078f3b25\") " pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.170537 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-dns-svc\") pod \"dnsmasq-dns-6578955fd5-nxxw4\" (UID: \"72d39196-9303-41a7-aa15-6eb7078f3b25\") " pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.170570 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.202245 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.204332 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.208972 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.213590 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.273152 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-dns-svc\") pod \"dnsmasq-dns-6578955fd5-nxxw4\" (UID: \"72d39196-9303-41a7-aa15-6eb7078f3b25\") " pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.273206 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.273237 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-config\") pod \"dnsmasq-dns-6578955fd5-nxxw4\" (UID: \"72d39196-9303-41a7-aa15-6eb7078f3b25\") " pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.273252 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-config-data\") pod \"cinder-scheduler-0\" (UID: \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.273287 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.273301 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.273345 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-scripts\") pod \"cinder-scheduler-0\" (UID: \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.273362 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpqml\" (UniqueName: \"kubernetes.io/projected/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-kube-api-access-jpqml\") pod \"cinder-scheduler-0\" (UID: \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.273385 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxn4r\" (UniqueName: 
\"kubernetes.io/projected/72d39196-9303-41a7-aa15-6eb7078f3b25-kube-api-access-dxn4r\") pod \"dnsmasq-dns-6578955fd5-nxxw4\" (UID: \"72d39196-9303-41a7-aa15-6eb7078f3b25\") " pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.273427 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-nxxw4\" (UID: \"72d39196-9303-41a7-aa15-6eb7078f3b25\") " pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.273456 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-nxxw4\" (UID: \"72d39196-9303-41a7-aa15-6eb7078f3b25\") " pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.273482 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-nxxw4\" (UID: \"72d39196-9303-41a7-aa15-6eb7078f3b25\") " pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.274330 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.274374 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-nxxw4\" (UID: \"72d39196-9303-41a7-aa15-6eb7078f3b25\") " pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.275025 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-dns-svc\") pod \"dnsmasq-dns-6578955fd5-nxxw4\" (UID: \"72d39196-9303-41a7-aa15-6eb7078f3b25\") " pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.277571 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-nxxw4\" (UID: \"72d39196-9303-41a7-aa15-6eb7078f3b25\") " pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.277691 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-nxxw4\" (UID: \"72d39196-9303-41a7-aa15-6eb7078f3b25\") " pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.277760 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-config\") pod \"dnsmasq-dns-6578955fd5-nxxw4\" (UID: 
\"72d39196-9303-41a7-aa15-6eb7078f3b25\") " pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.282357 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.304412 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-config-data\") pod \"cinder-scheduler-0\" (UID: \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.305055 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-scripts\") pod \"cinder-scheduler-0\" (UID: \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.312801 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.315781 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxn4r\" (UniqueName: \"kubernetes.io/projected/72d39196-9303-41a7-aa15-6eb7078f3b25-kube-api-access-dxn4r\") pod \"dnsmasq-dns-6578955fd5-nxxw4\" (UID: \"72d39196-9303-41a7-aa15-6eb7078f3b25\") " pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.317470 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpqml\" (UniqueName: \"kubernetes.io/projected/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-kube-api-access-jpqml\") pod \"cinder-scheduler-0\" (UID: \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.375259 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1b005f0-6dbb-4120-ab69-6ad8cc281434-config-data\") pod \"cinder-api-0\" (UID: \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\") " pod="openstack/cinder-api-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.375487 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d1b005f0-6dbb-4120-ab69-6ad8cc281434-logs\") pod \"cinder-api-0\" (UID: \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\") " pod="openstack/cinder-api-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.375581 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1b005f0-6dbb-4120-ab69-6ad8cc281434-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\") " pod="openstack/cinder-api-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.375688 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d1b005f0-6dbb-4120-ab69-6ad8cc281434-config-data-custom\") pod \"cinder-api-0\" (UID: \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\") " pod="openstack/cinder-api-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.375774 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7zb5\" (UniqueName: \"kubernetes.io/projected/d1b005f0-6dbb-4120-ab69-6ad8cc281434-kube-api-access-w7zb5\") pod \"cinder-api-0\" (UID: \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\") " pod="openstack/cinder-api-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.375858 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1b005f0-6dbb-4120-ab69-6ad8cc281434-scripts\") pod \"cinder-api-0\" (UID: \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\") " pod="openstack/cinder-api-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.376040 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d1b005f0-6dbb-4120-ab69-6ad8cc281434-etc-machine-id\") pod \"cinder-api-0\" (UID: \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\") " pod="openstack/cinder-api-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.381851 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.400395 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.480047 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1b005f0-6dbb-4120-ab69-6ad8cc281434-config-data\") pod \"cinder-api-0\" (UID: \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\") " pod="openstack/cinder-api-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.480484 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d1b005f0-6dbb-4120-ab69-6ad8cc281434-logs\") pod \"cinder-api-0\" (UID: \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\") " pod="openstack/cinder-api-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.480532 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1b005f0-6dbb-4120-ab69-6ad8cc281434-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\") " pod="openstack/cinder-api-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.480583 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d1b005f0-6dbb-4120-ab69-6ad8cc281434-config-data-custom\") pod \"cinder-api-0\" (UID: \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\") " pod="openstack/cinder-api-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.480622 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7zb5\" (UniqueName: \"kubernetes.io/projected/d1b005f0-6dbb-4120-ab69-6ad8cc281434-kube-api-access-w7zb5\") pod \"cinder-api-0\" (UID: \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\") " pod="openstack/cinder-api-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.480665 4899 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1b005f0-6dbb-4120-ab69-6ad8cc281434-scripts\") pod \"cinder-api-0\" (UID: \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\") " pod="openstack/cinder-api-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.480770 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d1b005f0-6dbb-4120-ab69-6ad8cc281434-etc-machine-id\") pod \"cinder-api-0\" (UID: \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\") " pod="openstack/cinder-api-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.480918 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d1b005f0-6dbb-4120-ab69-6ad8cc281434-etc-machine-id\") pod \"cinder-api-0\" (UID: \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\") " pod="openstack/cinder-api-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.481453 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d1b005f0-6dbb-4120-ab69-6ad8cc281434-logs\") pod \"cinder-api-0\" (UID: \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\") " pod="openstack/cinder-api-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.485096 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1b005f0-6dbb-4120-ab69-6ad8cc281434-scripts\") pod \"cinder-api-0\" (UID: \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\") " pod="openstack/cinder-api-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.488098 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d1b005f0-6dbb-4120-ab69-6ad8cc281434-config-data-custom\") pod \"cinder-api-0\" (UID: \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\") " pod="openstack/cinder-api-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.495567 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1b005f0-6dbb-4120-ab69-6ad8cc281434-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\") " pod="openstack/cinder-api-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.501544 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7zb5\" (UniqueName: \"kubernetes.io/projected/d1b005f0-6dbb-4120-ab69-6ad8cc281434-kube-api-access-w7zb5\") pod \"cinder-api-0\" (UID: \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\") " pod="openstack/cinder-api-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.507909 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1b005f0-6dbb-4120-ab69-6ad8cc281434-config-data\") pod \"cinder-api-0\" (UID: \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\") " pod="openstack/cinder-api-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.548333 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 03 08:57:11 crc kubenswrapper[4899]: I1003 08:57:11.809231 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-56dc79cc94-hbxqp" Oct 03 08:57:12 crc kubenswrapper[4899]: I1003 08:57:12.569821 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-68b787dc9b-6xw72" Oct 03 08:57:12 crc kubenswrapper[4899]: I1003 08:57:12.858570 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.017765 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.063461 4899 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/neutron-5dc9cdc98b-cgxhj" Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.064680 4899 scope.go:117] "RemoveContainer" containerID="a23ac8cf8d4767c51a114f69d3bb77e24400da4dd9413c9b91cb078b4a66625f" Oct 03 08:57:13 crc kubenswrapper[4899]: E1003 08:57:13.065207 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"neutron-httpd\" with CrashLoopBackOff: \"back-off 20s restarting failed container=neutron-httpd pod=neutron-5dc9cdc98b-cgxhj_openstack(a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73)\"" pod="openstack/neutron-5dc9cdc98b-cgxhj" podUID="a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.070968 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/neutron-5dc9cdc98b-cgxhj" podUID="a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" containerName="neutron-api" probeResult="failure" output="Get \"http://10.217.0.153:9696/\": dial tcp 10.217.0.153:9696: connect: connection refused" Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.116591 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-ovsdbserver-nb\") pod \"a50d8934-3720-4f4e-a702-bf8b43090f52\" (UID: \"a50d8934-3720-4f4e-a702-bf8b43090f52\") " Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.116646 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-dns-svc\") pod \"a50d8934-3720-4f4e-a702-bf8b43090f52\" (UID: \"a50d8934-3720-4f4e-a702-bf8b43090f52\") " Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.116673 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-config\") pod \"a50d8934-3720-4f4e-a702-bf8b43090f52\" (UID: \"a50d8934-3720-4f4e-a702-bf8b43090f52\") " Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.116784 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-ovsdbserver-sb\") pod \"a50d8934-3720-4f4e-a702-bf8b43090f52\" (UID: \"a50d8934-3720-4f4e-a702-bf8b43090f52\") " Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.116875 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rkl5x\" (UniqueName: \"kubernetes.io/projected/a50d8934-3720-4f4e-a702-bf8b43090f52-kube-api-access-rkl5x\") 
pod \"a50d8934-3720-4f4e-a702-bf8b43090f52\" (UID: \"a50d8934-3720-4f4e-a702-bf8b43090f52\") " Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.117016 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-dns-swift-storage-0\") pod \"a50d8934-3720-4f4e-a702-bf8b43090f52\" (UID: \"a50d8934-3720-4f4e-a702-bf8b43090f52\") " Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.152071 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a50d8934-3720-4f4e-a702-bf8b43090f52-kube-api-access-rkl5x" (OuterVolumeSpecName: "kube-api-access-rkl5x") pod "a50d8934-3720-4f4e-a702-bf8b43090f52" (UID: "a50d8934-3720-4f4e-a702-bf8b43090f52"). InnerVolumeSpecName "kube-api-access-rkl5x". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.204348 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "a50d8934-3720-4f4e-a702-bf8b43090f52" (UID: "a50d8934-3720-4f4e-a702-bf8b43090f52"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.220270 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rkl5x\" (UniqueName: \"kubernetes.io/projected/a50d8934-3720-4f4e-a702-bf8b43090f52-kube-api-access-rkl5x\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.220309 4899 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.235445 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-config" (OuterVolumeSpecName: "config") pod "a50d8934-3720-4f4e-a702-bf8b43090f52" (UID: "a50d8934-3720-4f4e-a702-bf8b43090f52"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.240339 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a50d8934-3720-4f4e-a702-bf8b43090f52" (UID: "a50d8934-3720-4f4e-a702-bf8b43090f52"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.281882 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a50d8934-3720-4f4e-a702-bf8b43090f52" (UID: "a50d8934-3720-4f4e-a702-bf8b43090f52"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.282540 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-68b787dc9b-6xw72" Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.283615 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a50d8934-3720-4f4e-a702-bf8b43090f52" (UID: "a50d8934-3720-4f4e-a702-bf8b43090f52"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.306207 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-nxxw4"] Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.323464 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.323518 4899 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.323531 4899 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.323545 4899 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a50d8934-3720-4f4e-a702-bf8b43090f52-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.343230 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.491254 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.703673 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-56dc79cc94-hbxqp" Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.781232 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-68b787dc9b-6xw72"] Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.843651 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"d1b005f0-6dbb-4120-ab69-6ad8cc281434","Type":"ContainerStarted","Data":"b6d94abff7ff1b3db160ea2a2c51207135e38a95d9b2b7019be3622013fe0ad8"} Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.851176 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" event={"ID":"72d39196-9303-41a7-aa15-6eb7078f3b25","Type":"ContainerStarted","Data":"39058808ec53cce944ec76e418ca4e9a629f0bfda817bbd6ebad20f6e0de45b3"} Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.882097 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8dc2bbb6-802d-4713-935a-353136d48619" containerName="ceilometer-central-agent" containerID="cri-o://b2f84a546cd6d33241b3c7fc75baa9b04e3971c497f03dfb13cc00a32d93778d" gracePeriod=30 Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.882402 4899 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8dc2bbb6-802d-4713-935a-353136d48619","Type":"ContainerStarted","Data":"571a121826c6009b1c2446a9f53aba790bcb7fd3177d60578e25fa420df835b6"} Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.882439 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8dc2bbb6-802d-4713-935a-353136d48619" containerName="proxy-httpd" containerID="cri-o://571a121826c6009b1c2446a9f53aba790bcb7fd3177d60578e25fa420df835b6" gracePeriod=30 Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.882457 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.882496 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8dc2bbb6-802d-4713-935a-353136d48619" containerName="sg-core" containerID="cri-o://1c56c9cbb6162d968d34d9685967acf15b3d6dc094495c7f157c8036a86f3a20" gracePeriod=30 Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.882552 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8dc2bbb6-802d-4713-935a-353136d48619" containerName="ceilometer-notification-agent" containerID="cri-o://f0400fdc909b6bad5c8efce24af3f745da00e6fe9bef5a7559235ed9bfad53aa" gracePeriod=30 Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.891564 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-68b787dc9b-6xw72" podUID="987dbede-0277-4b22-b643-36d8b379542f" containerName="barbican-api-log" containerID="cri-o://c2a2658ed205303a0b39c25e056177f5e499bc1a5ae456a53e76c8a7a00efc9b" gracePeriod=30 Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.891735 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.901904 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-c8f7n" event={"ID":"a50d8934-3720-4f4e-a702-bf8b43090f52","Type":"ContainerDied","Data":"f20b007ce9317ff3e33743597f1d13e3233d73b81d03f2a7e1bd117e74078e46"} Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.901956 4899 scope.go:117] "RemoveContainer" containerID="d86db3b904decf04300f016bf56997d71c802a962bc948fd94ad20deebf0cf19" Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.902578 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-68b787dc9b-6xw72" podUID="987dbede-0277-4b22-b643-36d8b379542f" containerName="barbican-api" containerID="cri-o://70d85b4c0dff5a81bb678bf9e95a38d95d816ea7ea6093e2f6e67c1780145eb7" gracePeriod=30 Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.913633 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-68b787dc9b-6xw72" podUID="987dbede-0277-4b22-b643-36d8b379542f" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.160:9311/healthcheck\": EOF" Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.913770 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-68b787dc9b-6xw72" podUID="987dbede-0277-4b22-b643-36d8b379542f" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.160:9311/healthcheck\": EOF" Oct 03 08:57:13 crc kubenswrapper[4899]: I1003 08:57:13.928038 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=5.36037128 podStartE2EDuration="1m3.927983108s" podCreationTimestamp="2025-10-03 08:56:10 +0000 UTC" firstStartedPulling="2025-10-03 08:56:14.199212004 +0000 UTC m=+948.306696957" lastFinishedPulling="2025-10-03 08:57:12.766823842 +0000 UTC m=+1006.874308785" observedRunningTime="2025-10-03 08:57:13.908168113 +0000 UTC m=+1008.015653066" watchObservedRunningTime="2025-10-03 08:57:13.927983108 +0000 UTC m=+1008.035468061" Oct 03 08:57:13 crc kubenswrapper[4899]: W1003 08:57:13.999681 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf9015e0_4ee8_45f0_9788_bc7060d4c8e5.slice/crio-b20bb2156e6df48d5e85d42d4b3280db47d43f063c1cac2b68cb959d4789dfff WatchSource:0}: Error finding container b20bb2156e6df48d5e85d42d4b3280db47d43f063c1cac2b68cb959d4789dfff: Status 404 returned error can't find the container with id b20bb2156e6df48d5e85d42d4b3280db47d43f063c1cac2b68cb959d4789dfff Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.333978 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-c8f7n"] Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.343359 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-c8f7n"] Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.414583 4899 scope.go:117] "RemoveContainer" containerID="eb0d94f717510d452a2595c8ed1663ddf08621a452a626a36ff79fb72cfe9833" Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.574420 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a50d8934-3720-4f4e-a702-bf8b43090f52" path="/var/lib/kubelet/pods/a50d8934-3720-4f4e-a702-bf8b43090f52/volumes" Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.793983 4899 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-78dbfb6845-276zt" Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.912665 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5","Type":"ContainerStarted","Data":"b20bb2156e6df48d5e85d42d4b3280db47d43f063c1cac2b68cb959d4789dfff"} Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.930378 4899 generic.go:334] "Generic (PLEG): container finished" podID="987dbede-0277-4b22-b643-36d8b379542f" containerID="c2a2658ed205303a0b39c25e056177f5e499bc1a5ae456a53e76c8a7a00efc9b" exitCode=143 Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.930490 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68b787dc9b-6xw72" event={"ID":"987dbede-0277-4b22-b643-36d8b379542f","Type":"ContainerDied","Data":"c2a2658ed205303a0b39c25e056177f5e499bc1a5ae456a53e76c8a7a00efc9b"} Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.940786 4899 generic.go:334] "Generic (PLEG): container finished" podID="c45fd4cf-daa8-4226-bb75-c55604b5ccb6" containerID="771eb46f0fe70dd3f1edce1943952deb9b0d001bdb141f2bf295b3bc3fdafc95" exitCode=137 Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.940851 4899 generic.go:334] "Generic (PLEG): container finished" podID="c45fd4cf-daa8-4226-bb75-c55604b5ccb6" containerID="e3df63268a3c07392e47cc8dfd3b47152d0e4dda655ace80d26f905a7d356c67" exitCode=137 Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.940973 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-58b885464f-hm46m" event={"ID":"c45fd4cf-daa8-4226-bb75-c55604b5ccb6","Type":"ContainerDied","Data":"771eb46f0fe70dd3f1edce1943952deb9b0d001bdb141f2bf295b3bc3fdafc95"} Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.941007 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-58b885464f-hm46m" event={"ID":"c45fd4cf-daa8-4226-bb75-c55604b5ccb6","Type":"ContainerDied","Data":"e3df63268a3c07392e47cc8dfd3b47152d0e4dda655ace80d26f905a7d356c67"} Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.956464 4899 generic.go:334] "Generic (PLEG): container finished" podID="3dd81aae-6922-43f5-83a6-c81a06bfedea" containerID="b0ac4e0b7f50ac475ffc9d876635758dbf019e7300011f0b1340590c4b5a488a" exitCode=137 Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.956493 4899 generic.go:334] "Generic (PLEG): container finished" podID="3dd81aae-6922-43f5-83a6-c81a06bfedea" containerID="cd54378665cb0a1b706025fd9f0974ad505e52fc1e586b486c5ca7938f97af55" exitCode=137 Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.956545 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5874b7f477-6z4bj" event={"ID":"3dd81aae-6922-43f5-83a6-c81a06bfedea","Type":"ContainerDied","Data":"b0ac4e0b7f50ac475ffc9d876635758dbf019e7300011f0b1340590c4b5a488a"} Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.956574 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5874b7f477-6z4bj" event={"ID":"3dd81aae-6922-43f5-83a6-c81a06bfedea","Type":"ContainerDied","Data":"cd54378665cb0a1b706025fd9f0974ad505e52fc1e586b486c5ca7938f97af55"} Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.969499 4899 generic.go:334] "Generic (PLEG): container finished" podID="190b54d9-49f3-4d1a-aac5-ab680fafa605" containerID="a2d011c3ab331136c75d43fec472b9a75f8908d45d50156eb62098d8cf45d831" exitCode=137 Oct 03 08:57:14 crc 
kubenswrapper[4899]: I1003 08:57:14.969551 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-78dbfb6845-276zt" event={"ID":"190b54d9-49f3-4d1a-aac5-ab680fafa605","Type":"ContainerDied","Data":"a2d011c3ab331136c75d43fec472b9a75f8908d45d50156eb62098d8cf45d831"} Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.969582 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-78dbfb6845-276zt" Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.969597 4899 scope.go:117] "RemoveContainer" containerID="a2d011c3ab331136c75d43fec472b9a75f8908d45d50156eb62098d8cf45d831" Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.969822 4899 generic.go:334] "Generic (PLEG): container finished" podID="190b54d9-49f3-4d1a-aac5-ab680fafa605" containerID="a8817e1b1d2b80fb410e861131738b722f9468770e8f28f2acbbee904de13e81" exitCode=137 Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.969874 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-78dbfb6845-276zt" event={"ID":"190b54d9-49f3-4d1a-aac5-ab680fafa605","Type":"ContainerDied","Data":"a8817e1b1d2b80fb410e861131738b722f9468770e8f28f2acbbee904de13e81"} Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.969903 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-78dbfb6845-276zt" event={"ID":"190b54d9-49f3-4d1a-aac5-ab680fafa605","Type":"ContainerDied","Data":"ea08bd4bc83ab5bd0010902d9143901cef477c4683b60e483e3cb38762d7eca2"} Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.974986 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/190b54d9-49f3-4d1a-aac5-ab680fafa605-horizon-secret-key\") pod \"190b54d9-49f3-4d1a-aac5-ab680fafa605\" (UID: \"190b54d9-49f3-4d1a-aac5-ab680fafa605\") " Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.975104 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/190b54d9-49f3-4d1a-aac5-ab680fafa605-config-data\") pod \"190b54d9-49f3-4d1a-aac5-ab680fafa605\" (UID: \"190b54d9-49f3-4d1a-aac5-ab680fafa605\") " Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.975218 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/190b54d9-49f3-4d1a-aac5-ab680fafa605-scripts\") pod \"190b54d9-49f3-4d1a-aac5-ab680fafa605\" (UID: \"190b54d9-49f3-4d1a-aac5-ab680fafa605\") " Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.975264 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ps45n\" (UniqueName: \"kubernetes.io/projected/190b54d9-49f3-4d1a-aac5-ab680fafa605-kube-api-access-ps45n\") pod \"190b54d9-49f3-4d1a-aac5-ab680fafa605\" (UID: \"190b54d9-49f3-4d1a-aac5-ab680fafa605\") " Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.975320 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/190b54d9-49f3-4d1a-aac5-ab680fafa605-logs\") pod \"190b54d9-49f3-4d1a-aac5-ab680fafa605\" (UID: \"190b54d9-49f3-4d1a-aac5-ab680fafa605\") " Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.976778 4899 generic.go:334] "Generic (PLEG): container finished" podID="72d39196-9303-41a7-aa15-6eb7078f3b25" containerID="e65245adac283275d339b23ea90eff337b3d2abe1d09e93a4b0d3d164b8de41e" exitCode=0 Oct 03 08:57:14 crc 
kubenswrapper[4899]: I1003 08:57:14.976812 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/190b54d9-49f3-4d1a-aac5-ab680fafa605-logs" (OuterVolumeSpecName: "logs") pod "190b54d9-49f3-4d1a-aac5-ab680fafa605" (UID: "190b54d9-49f3-4d1a-aac5-ab680fafa605"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.976881 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" event={"ID":"72d39196-9303-41a7-aa15-6eb7078f3b25","Type":"ContainerDied","Data":"e65245adac283275d339b23ea90eff337b3d2abe1d09e93a4b0d3d164b8de41e"} Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.989085 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/190b54d9-49f3-4d1a-aac5-ab680fafa605-kube-api-access-ps45n" (OuterVolumeSpecName: "kube-api-access-ps45n") pod "190b54d9-49f3-4d1a-aac5-ab680fafa605" (UID: "190b54d9-49f3-4d1a-aac5-ab680fafa605"). InnerVolumeSpecName "kube-api-access-ps45n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:57:14 crc kubenswrapper[4899]: I1003 08:57:14.990127 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/190b54d9-49f3-4d1a-aac5-ab680fafa605-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "190b54d9-49f3-4d1a-aac5-ab680fafa605" (UID: "190b54d9-49f3-4d1a-aac5-ab680fafa605"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.015139 4899 generic.go:334] "Generic (PLEG): container finished" podID="8dc2bbb6-802d-4713-935a-353136d48619" containerID="571a121826c6009b1c2446a9f53aba790bcb7fd3177d60578e25fa420df835b6" exitCode=0 Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.015176 4899 generic.go:334] "Generic (PLEG): container finished" podID="8dc2bbb6-802d-4713-935a-353136d48619" containerID="1c56c9cbb6162d968d34d9685967acf15b3d6dc094495c7f157c8036a86f3a20" exitCode=2 Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.015184 4899 generic.go:334] "Generic (PLEG): container finished" podID="8dc2bbb6-802d-4713-935a-353136d48619" containerID="b2f84a546cd6d33241b3c7fc75baa9b04e3971c497f03dfb13cc00a32d93778d" exitCode=0 Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.015205 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8dc2bbb6-802d-4713-935a-353136d48619","Type":"ContainerDied","Data":"571a121826c6009b1c2446a9f53aba790bcb7fd3177d60578e25fa420df835b6"} Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.015230 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8dc2bbb6-802d-4713-935a-353136d48619","Type":"ContainerDied","Data":"1c56c9cbb6162d968d34d9685967acf15b3d6dc094495c7f157c8036a86f3a20"} Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.015240 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8dc2bbb6-802d-4713-935a-353136d48619","Type":"ContainerDied","Data":"b2f84a546cd6d33241b3c7fc75baa9b04e3971c497f03dfb13cc00a32d93778d"} Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.015926 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/190b54d9-49f3-4d1a-aac5-ab680fafa605-config-data" (OuterVolumeSpecName: "config-data") pod 
"190b54d9-49f3-4d1a-aac5-ab680fafa605" (UID: "190b54d9-49f3-4d1a-aac5-ab680fafa605"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.051445 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/190b54d9-49f3-4d1a-aac5-ab680fafa605-scripts" (OuterVolumeSpecName: "scripts") pod "190b54d9-49f3-4d1a-aac5-ab680fafa605" (UID: "190b54d9-49f3-4d1a-aac5-ab680fafa605"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.077189 4899 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/190b54d9-49f3-4d1a-aac5-ab680fafa605-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.077316 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ps45n\" (UniqueName: \"kubernetes.io/projected/190b54d9-49f3-4d1a-aac5-ab680fafa605-kube-api-access-ps45n\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.077371 4899 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/190b54d9-49f3-4d1a-aac5-ab680fafa605-logs\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.077423 4899 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/190b54d9-49f3-4d1a-aac5-ab680fafa605-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.077472 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/190b54d9-49f3-4d1a-aac5-ab680fafa605-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.140424 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5874b7f477-6z4bj" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.280927 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3dd81aae-6922-43f5-83a6-c81a06bfedea-logs\") pod \"3dd81aae-6922-43f5-83a6-c81a06bfedea\" (UID: \"3dd81aae-6922-43f5-83a6-c81a06bfedea\") " Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.281347 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3dd81aae-6922-43f5-83a6-c81a06bfedea-logs" (OuterVolumeSpecName: "logs") pod "3dd81aae-6922-43f5-83a6-c81a06bfedea" (UID: "3dd81aae-6922-43f5-83a6-c81a06bfedea"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.281517 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qq5f2\" (UniqueName: \"kubernetes.io/projected/3dd81aae-6922-43f5-83a6-c81a06bfedea-kube-api-access-qq5f2\") pod \"3dd81aae-6922-43f5-83a6-c81a06bfedea\" (UID: \"3dd81aae-6922-43f5-83a6-c81a06bfedea\") " Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.282162 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3dd81aae-6922-43f5-83a6-c81a06bfedea-scripts\") pod \"3dd81aae-6922-43f5-83a6-c81a06bfedea\" (UID: \"3dd81aae-6922-43f5-83a6-c81a06bfedea\") " Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.282207 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3dd81aae-6922-43f5-83a6-c81a06bfedea-horizon-secret-key\") pod \"3dd81aae-6922-43f5-83a6-c81a06bfedea\" (UID: \"3dd81aae-6922-43f5-83a6-c81a06bfedea\") " Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.282280 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3dd81aae-6922-43f5-83a6-c81a06bfedea-config-data\") pod \"3dd81aae-6922-43f5-83a6-c81a06bfedea\" (UID: \"3dd81aae-6922-43f5-83a6-c81a06bfedea\") " Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.282920 4899 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3dd81aae-6922-43f5-83a6-c81a06bfedea-logs\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.288344 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3dd81aae-6922-43f5-83a6-c81a06bfedea-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "3dd81aae-6922-43f5-83a6-c81a06bfedea" (UID: "3dd81aae-6922-43f5-83a6-c81a06bfedea"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.288492 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3dd81aae-6922-43f5-83a6-c81a06bfedea-kube-api-access-qq5f2" (OuterVolumeSpecName: "kube-api-access-qq5f2") pod "3dd81aae-6922-43f5-83a6-c81a06bfedea" (UID: "3dd81aae-6922-43f5-83a6-c81a06bfedea"). InnerVolumeSpecName "kube-api-access-qq5f2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.315455 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3dd81aae-6922-43f5-83a6-c81a06bfedea-config-data" (OuterVolumeSpecName: "config-data") pod "3dd81aae-6922-43f5-83a6-c81a06bfedea" (UID: "3dd81aae-6922-43f5-83a6-c81a06bfedea"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.316010 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-78dbfb6845-276zt"] Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.324254 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-78dbfb6845-276zt"] Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.324994 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3dd81aae-6922-43f5-83a6-c81a06bfedea-scripts" (OuterVolumeSpecName: "scripts") pod "3dd81aae-6922-43f5-83a6-c81a06bfedea" (UID: "3dd81aae-6922-43f5-83a6-c81a06bfedea"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.343412 4899 scope.go:117] "RemoveContainer" containerID="a8817e1b1d2b80fb410e861131738b722f9468770e8f28f2acbbee904de13e81" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.365958 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-cdb85d7df-9hdqn" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.394742 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qq5f2\" (UniqueName: \"kubernetes.io/projected/3dd81aae-6922-43f5-83a6-c81a06bfedea-kube-api-access-qq5f2\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.394784 4899 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3dd81aae-6922-43f5-83a6-c81a06bfedea-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.394796 4899 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3dd81aae-6922-43f5-83a6-c81a06bfedea-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.394811 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3dd81aae-6922-43f5-83a6-c81a06bfedea-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.428485 4899 scope.go:117] "RemoveContainer" containerID="a2d011c3ab331136c75d43fec472b9a75f8908d45d50156eb62098d8cf45d831" Oct 03 08:57:15 crc kubenswrapper[4899]: E1003 08:57:15.430016 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2d011c3ab331136c75d43fec472b9a75f8908d45d50156eb62098d8cf45d831\": container with ID starting with a2d011c3ab331136c75d43fec472b9a75f8908d45d50156eb62098d8cf45d831 not found: ID does not exist" containerID="a2d011c3ab331136c75d43fec472b9a75f8908d45d50156eb62098d8cf45d831" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.430068 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2d011c3ab331136c75d43fec472b9a75f8908d45d50156eb62098d8cf45d831"} err="failed to get container status \"a2d011c3ab331136c75d43fec472b9a75f8908d45d50156eb62098d8cf45d831\": rpc error: code = NotFound desc = could not find container \"a2d011c3ab331136c75d43fec472b9a75f8908d45d50156eb62098d8cf45d831\": container with ID starting with a2d011c3ab331136c75d43fec472b9a75f8908d45d50156eb62098d8cf45d831 not found: ID does not exist" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.430099 4899 scope.go:117] 
"RemoveContainer" containerID="a8817e1b1d2b80fb410e861131738b722f9468770e8f28f2acbbee904de13e81" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.430292 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5dc9cdc98b-cgxhj"] Oct 03 08:57:15 crc kubenswrapper[4899]: E1003 08:57:15.433333 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8817e1b1d2b80fb410e861131738b722f9468770e8f28f2acbbee904de13e81\": container with ID starting with a8817e1b1d2b80fb410e861131738b722f9468770e8f28f2acbbee904de13e81 not found: ID does not exist" containerID="a8817e1b1d2b80fb410e861131738b722f9468770e8f28f2acbbee904de13e81" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.433377 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8817e1b1d2b80fb410e861131738b722f9468770e8f28f2acbbee904de13e81"} err="failed to get container status \"a8817e1b1d2b80fb410e861131738b722f9468770e8f28f2acbbee904de13e81\": rpc error: code = NotFound desc = could not find container \"a8817e1b1d2b80fb410e861131738b722f9468770e8f28f2acbbee904de13e81\": container with ID starting with a8817e1b1d2b80fb410e861131738b722f9468770e8f28f2acbbee904de13e81 not found: ID does not exist" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.433403 4899 scope.go:117] "RemoveContainer" containerID="a2d011c3ab331136c75d43fec472b9a75f8908d45d50156eb62098d8cf45d831" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.434202 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5dc9cdc98b-cgxhj" podUID="a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" containerName="neutron-api" containerID="cri-o://bbf2fd19a32c7a8f0bf403d2e7a0db3a12683c31ebf1d37d2430fdb7bf52e8ea" gracePeriod=30 Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.434969 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2d011c3ab331136c75d43fec472b9a75f8908d45d50156eb62098d8cf45d831"} err="failed to get container status \"a2d011c3ab331136c75d43fec472b9a75f8908d45d50156eb62098d8cf45d831\": rpc error: code = NotFound desc = could not find container \"a2d011c3ab331136c75d43fec472b9a75f8908d45d50156eb62098d8cf45d831\": container with ID starting with a2d011c3ab331136c75d43fec472b9a75f8908d45d50156eb62098d8cf45d831 not found: ID does not exist" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.434990 4899 scope.go:117] "RemoveContainer" containerID="a8817e1b1d2b80fb410e861131738b722f9468770e8f28f2acbbee904de13e81" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.437814 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8817e1b1d2b80fb410e861131738b722f9468770e8f28f2acbbee904de13e81"} err="failed to get container status \"a8817e1b1d2b80fb410e861131738b722f9468770e8f28f2acbbee904de13e81\": rpc error: code = NotFound desc = could not find container \"a8817e1b1d2b80fb410e861131738b722f9468770e8f28f2acbbee904de13e81\": container with ID starting with a8817e1b1d2b80fb410e861131738b722f9468770e8f28f2acbbee904de13e81 not found: ID does not exist" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.463623 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-58b885464f-hm46m" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.600787 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-horizon-secret-key\") pod \"c45fd4cf-daa8-4226-bb75-c55604b5ccb6\" (UID: \"c45fd4cf-daa8-4226-bb75-c55604b5ccb6\") " Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.600957 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-config-data\") pod \"c45fd4cf-daa8-4226-bb75-c55604b5ccb6\" (UID: \"c45fd4cf-daa8-4226-bb75-c55604b5ccb6\") " Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.600985 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-scripts\") pod \"c45fd4cf-daa8-4226-bb75-c55604b5ccb6\" (UID: \"c45fd4cf-daa8-4226-bb75-c55604b5ccb6\") " Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.601233 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-logs\") pod \"c45fd4cf-daa8-4226-bb75-c55604b5ccb6\" (UID: \"c45fd4cf-daa8-4226-bb75-c55604b5ccb6\") " Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.601367 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hzld8\" (UniqueName: \"kubernetes.io/projected/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-kube-api-access-hzld8\") pod \"c45fd4cf-daa8-4226-bb75-c55604b5ccb6\" (UID: \"c45fd4cf-daa8-4226-bb75-c55604b5ccb6\") " Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.603733 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-logs" (OuterVolumeSpecName: "logs") pod "c45fd4cf-daa8-4226-bb75-c55604b5ccb6" (UID: "c45fd4cf-daa8-4226-bb75-c55604b5ccb6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.606470 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "c45fd4cf-daa8-4226-bb75-c55604b5ccb6" (UID: "c45fd4cf-daa8-4226-bb75-c55604b5ccb6"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.614103 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-kube-api-access-hzld8" (OuterVolumeSpecName: "kube-api-access-hzld8") pod "c45fd4cf-daa8-4226-bb75-c55604b5ccb6" (UID: "c45fd4cf-daa8-4226-bb75-c55604b5ccb6"). InnerVolumeSpecName "kube-api-access-hzld8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.642030 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-scripts" (OuterVolumeSpecName: "scripts") pod "c45fd4cf-daa8-4226-bb75-c55604b5ccb6" (UID: "c45fd4cf-daa8-4226-bb75-c55604b5ccb6"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.654916 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-config-data" (OuterVolumeSpecName: "config-data") pod "c45fd4cf-daa8-4226-bb75-c55604b5ccb6" (UID: "c45fd4cf-daa8-4226-bb75-c55604b5ccb6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.710117 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hzld8\" (UniqueName: \"kubernetes.io/projected/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-kube-api-access-hzld8\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.710441 4899 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.710457 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.710486 4899 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:15 crc kubenswrapper[4899]: I1003 08:57:15.710500 4899 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c45fd4cf-daa8-4226-bb75-c55604b5ccb6-logs\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.031781 4899 generic.go:334] "Generic (PLEG): container finished" podID="8dc2bbb6-802d-4713-935a-353136d48619" containerID="f0400fdc909b6bad5c8efce24af3f745da00e6fe9bef5a7559235ed9bfad53aa" exitCode=0 Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.031812 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8dc2bbb6-802d-4713-935a-353136d48619","Type":"ContainerDied","Data":"f0400fdc909b6bad5c8efce24af3f745da00e6fe9bef5a7559235ed9bfad53aa"} Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.035529 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-58b885464f-hm46m" event={"ID":"c45fd4cf-daa8-4226-bb75-c55604b5ccb6","Type":"ContainerDied","Data":"b633321415ebbc15bbbc16b1d2b1a0cf6af529257f4be1c36337cc7dae1f9ff3"} Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.035578 4899 scope.go:117] "RemoveContainer" containerID="771eb46f0fe70dd3f1edce1943952deb9b0d001bdb141f2bf295b3bc3fdafc95" Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.035598 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-58b885464f-hm46m" Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.038123 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5874b7f477-6z4bj" event={"ID":"3dd81aae-6922-43f5-83a6-c81a06bfedea","Type":"ContainerDied","Data":"ebf5e437698e9610b4663293a9bd24ff18e123e4d04b399fc797d18867f65449"} Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.038157 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5874b7f477-6z4bj" Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.040963 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"d1b005f0-6dbb-4120-ab69-6ad8cc281434","Type":"ContainerStarted","Data":"d6f33661ca264553cd5a253e639088556ffdba0c1175424a461da377376ecc26"} Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.046377 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" event={"ID":"72d39196-9303-41a7-aa15-6eb7078f3b25","Type":"ContainerStarted","Data":"dfece2e32f675eea8d61f56dd029aa07e09b6b957647a85c322c24e877242548"} Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.046704 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.076026 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" podStartSLOduration=6.076004394 podStartE2EDuration="6.076004394s" podCreationTimestamp="2025-10-03 08:57:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:57:16.062031514 +0000 UTC m=+1010.169516497" watchObservedRunningTime="2025-10-03 08:57:16.076004394 +0000 UTC m=+1010.183489347" Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.132234 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-58b885464f-hm46m"] Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.142968 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-58b885464f-hm46m"] Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.153692 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5874b7f477-6z4bj"] Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.169473 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-5874b7f477-6z4bj"] Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.265568 4899 scope.go:117] "RemoveContainer" containerID="e3df63268a3c07392e47cc8dfd3b47152d0e4dda655ace80d26f905a7d356c67" Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.298270 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.298787 4899 scope.go:117] "RemoveContainer" containerID="b0ac4e0b7f50ac475ffc9d876635758dbf019e7300011f0b1340590c4b5a488a" Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.425596 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dc2bbb6-802d-4713-935a-353136d48619-config-data\") pod \"8dc2bbb6-802d-4713-935a-353136d48619\" (UID: \"8dc2bbb6-802d-4713-935a-353136d48619\") " Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.425718 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8dc2bbb6-802d-4713-935a-353136d48619-log-httpd\") pod \"8dc2bbb6-802d-4713-935a-353136d48619\" (UID: \"8dc2bbb6-802d-4713-935a-353136d48619\") " Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.425756 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jcn9b\" (UniqueName: \"kubernetes.io/projected/8dc2bbb6-802d-4713-935a-353136d48619-kube-api-access-jcn9b\") pod \"8dc2bbb6-802d-4713-935a-353136d48619\" (UID: \"8dc2bbb6-802d-4713-935a-353136d48619\") " Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.425779 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dc2bbb6-802d-4713-935a-353136d48619-combined-ca-bundle\") pod \"8dc2bbb6-802d-4713-935a-353136d48619\" (UID: \"8dc2bbb6-802d-4713-935a-353136d48619\") " Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.425840 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8dc2bbb6-802d-4713-935a-353136d48619-scripts\") pod \"8dc2bbb6-802d-4713-935a-353136d48619\" (UID: \"8dc2bbb6-802d-4713-935a-353136d48619\") " Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.425953 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8dc2bbb6-802d-4713-935a-353136d48619-run-httpd\") pod \"8dc2bbb6-802d-4713-935a-353136d48619\" (UID: \"8dc2bbb6-802d-4713-935a-353136d48619\") " Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.426025 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8dc2bbb6-802d-4713-935a-353136d48619-sg-core-conf-yaml\") pod \"8dc2bbb6-802d-4713-935a-353136d48619\" (UID: \"8dc2bbb6-802d-4713-935a-353136d48619\") " Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.428848 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8dc2bbb6-802d-4713-935a-353136d48619-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "8dc2bbb6-802d-4713-935a-353136d48619" (UID: "8dc2bbb6-802d-4713-935a-353136d48619"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.429930 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8dc2bbb6-802d-4713-935a-353136d48619-scripts" (OuterVolumeSpecName: "scripts") pod "8dc2bbb6-802d-4713-935a-353136d48619" (UID: "8dc2bbb6-802d-4713-935a-353136d48619"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.431288 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8dc2bbb6-802d-4713-935a-353136d48619-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "8dc2bbb6-802d-4713-935a-353136d48619" (UID: "8dc2bbb6-802d-4713-935a-353136d48619"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.433004 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8dc2bbb6-802d-4713-935a-353136d48619-kube-api-access-jcn9b" (OuterVolumeSpecName: "kube-api-access-jcn9b") pod "8dc2bbb6-802d-4713-935a-353136d48619" (UID: "8dc2bbb6-802d-4713-935a-353136d48619"). InnerVolumeSpecName "kube-api-access-jcn9b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.458198 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8dc2bbb6-802d-4713-935a-353136d48619-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "8dc2bbb6-802d-4713-935a-353136d48619" (UID: "8dc2bbb6-802d-4713-935a-353136d48619"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.502437 4899 scope.go:117] "RemoveContainer" containerID="cd54378665cb0a1b706025fd9f0974ad505e52fc1e586b486c5ca7938f97af55" Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.506132 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8dc2bbb6-802d-4713-935a-353136d48619-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8dc2bbb6-802d-4713-935a-353136d48619" (UID: "8dc2bbb6-802d-4713-935a-353136d48619"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.528096 4899 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8dc2bbb6-802d-4713-935a-353136d48619-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.528128 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jcn9b\" (UniqueName: \"kubernetes.io/projected/8dc2bbb6-802d-4713-935a-353136d48619-kube-api-access-jcn9b\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.528140 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dc2bbb6-802d-4713-935a-353136d48619-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.528150 4899 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8dc2bbb6-802d-4713-935a-353136d48619-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.528160 4899 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8dc2bbb6-802d-4713-935a-353136d48619-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.528169 4899 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8dc2bbb6-802d-4713-935a-353136d48619-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.547414 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="190b54d9-49f3-4d1a-aac5-ab680fafa605" path="/var/lib/kubelet/pods/190b54d9-49f3-4d1a-aac5-ab680fafa605/volumes" Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.551695 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3dd81aae-6922-43f5-83a6-c81a06bfedea" path="/var/lib/kubelet/pods/3dd81aae-6922-43f5-83a6-c81a06bfedea/volumes" Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.552458 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c45fd4cf-daa8-4226-bb75-c55604b5ccb6" path="/var/lib/kubelet/pods/c45fd4cf-daa8-4226-bb75-c55604b5ccb6/volumes" Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.589086 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8dc2bbb6-802d-4713-935a-353136d48619-config-data" (OuterVolumeSpecName: "config-data") pod "8dc2bbb6-802d-4713-935a-353136d48619" (UID: "8dc2bbb6-802d-4713-935a-353136d48619"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:16 crc kubenswrapper[4899]: I1003 08:57:16.629513 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dc2bbb6-802d-4713-935a-353136d48619-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.060327 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5dc9cdc98b-cgxhj_a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73/neutron-httpd/2.log" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.062029 4899 generic.go:334] "Generic (PLEG): container finished" podID="a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" containerID="bbf2fd19a32c7a8f0bf403d2e7a0db3a12683c31ebf1d37d2430fdb7bf52e8ea" exitCode=0 Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.062118 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5dc9cdc98b-cgxhj" event={"ID":"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73","Type":"ContainerDied","Data":"bbf2fd19a32c7a8f0bf403d2e7a0db3a12683c31ebf1d37d2430fdb7bf52e8ea"} Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.064949 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"d1b005f0-6dbb-4120-ab69-6ad8cc281434","Type":"ContainerStarted","Data":"e6a142e414a1c2336da2732a8db570355ce6a6abe5e92d6816dbfcf33a5ebe56"} Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.065126 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="d1b005f0-6dbb-4120-ab69-6ad8cc281434" containerName="cinder-api-log" containerID="cri-o://d6f33661ca264553cd5a253e639088556ffdba0c1175424a461da377376ecc26" gracePeriod=30 Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.065185 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.065216 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="d1b005f0-6dbb-4120-ab69-6ad8cc281434" containerName="cinder-api" containerID="cri-o://e6a142e414a1c2336da2732a8db570355ce6a6abe5e92d6816dbfcf33a5ebe56" gracePeriod=30 Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.076534 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8dc2bbb6-802d-4713-935a-353136d48619","Type":"ContainerDied","Data":"ca2179b0fb2273286072b1d0f3864b9a38705b4a5115caed667d98e24b85da17"} Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.076825 4899 scope.go:117] "RemoveContainer" containerID="571a121826c6009b1c2446a9f53aba790bcb7fd3177d60578e25fa420df835b6" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.077077 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.083670 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5","Type":"ContainerStarted","Data":"7397b89fa20b6c5df66f8651b4d6ad8159410551f8e40b3204e76ad9b9ed4336"} Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.083868 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5","Type":"ContainerStarted","Data":"af7b3d6c10c134c1c1e3d50f4f3161c2ea1697ba7b6e944c4159ee43548563ff"} Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.096049 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=6.09603007 podStartE2EDuration="6.09603007s" podCreationTimestamp="2025-10-03 08:57:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:57:17.09223895 +0000 UTC m=+1011.199723903" watchObservedRunningTime="2025-10-03 08:57:17.09603007 +0000 UTC m=+1011.203515023" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.113310 4899 scope.go:117] "RemoveContainer" containerID="1c56c9cbb6162d968d34d9685967acf15b3d6dc094495c7f157c8036a86f3a20" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.119793 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=5.687659419 podStartE2EDuration="7.119775859s" podCreationTimestamp="2025-10-03 08:57:10 +0000 UTC" firstStartedPulling="2025-10-03 08:57:14.007721203 +0000 UTC m=+1008.115206156" lastFinishedPulling="2025-10-03 08:57:15.439837643 +0000 UTC m=+1009.547322596" observedRunningTime="2025-10-03 08:57:17.117213178 +0000 UTC m=+1011.224698131" watchObservedRunningTime="2025-10-03 08:57:17.119775859 +0000 UTC m=+1011.227260812" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.141884 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.145019 4899 scope.go:117] "RemoveContainer" containerID="f0400fdc909b6bad5c8efce24af3f745da00e6fe9bef5a7559235ed9bfad53aa" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.154088 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.171822 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:57:17 crc kubenswrapper[4899]: E1003 08:57:17.172308 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dc2bbb6-802d-4713-935a-353136d48619" containerName="proxy-httpd" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.172330 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dc2bbb6-802d-4713-935a-353136d48619" containerName="proxy-httpd" Oct 03 08:57:17 crc kubenswrapper[4899]: E1003 08:57:17.172394 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dd81aae-6922-43f5-83a6-c81a06bfedea" containerName="horizon" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.172406 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dd81aae-6922-43f5-83a6-c81a06bfedea" containerName="horizon" Oct 03 08:57:17 crc kubenswrapper[4899]: E1003 08:57:17.172429 4899 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="8dc2bbb6-802d-4713-935a-353136d48619" containerName="sg-core" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.172437 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dc2bbb6-802d-4713-935a-353136d48619" containerName="sg-core" Oct 03 08:57:17 crc kubenswrapper[4899]: E1003 08:57:17.172450 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a50d8934-3720-4f4e-a702-bf8b43090f52" containerName="dnsmasq-dns" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.172458 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="a50d8934-3720-4f4e-a702-bf8b43090f52" containerName="dnsmasq-dns" Oct 03 08:57:17 crc kubenswrapper[4899]: E1003 08:57:17.172478 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dd81aae-6922-43f5-83a6-c81a06bfedea" containerName="horizon-log" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.172486 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dd81aae-6922-43f5-83a6-c81a06bfedea" containerName="horizon-log" Oct 03 08:57:17 crc kubenswrapper[4899]: E1003 08:57:17.172497 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dc2bbb6-802d-4713-935a-353136d48619" containerName="ceilometer-notification-agent" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.172503 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dc2bbb6-802d-4713-935a-353136d48619" containerName="ceilometer-notification-agent" Oct 03 08:57:17 crc kubenswrapper[4899]: E1003 08:57:17.172518 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c45fd4cf-daa8-4226-bb75-c55604b5ccb6" containerName="horizon-log" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.172524 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="c45fd4cf-daa8-4226-bb75-c55604b5ccb6" containerName="horizon-log" Oct 03 08:57:17 crc kubenswrapper[4899]: E1003 08:57:17.172531 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="190b54d9-49f3-4d1a-aac5-ab680fafa605" containerName="horizon" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.172537 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="190b54d9-49f3-4d1a-aac5-ab680fafa605" containerName="horizon" Oct 03 08:57:17 crc kubenswrapper[4899]: E1003 08:57:17.172548 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dc2bbb6-802d-4713-935a-353136d48619" containerName="ceilometer-central-agent" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.172554 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dc2bbb6-802d-4713-935a-353136d48619" containerName="ceilometer-central-agent" Oct 03 08:57:17 crc kubenswrapper[4899]: E1003 08:57:17.172565 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="190b54d9-49f3-4d1a-aac5-ab680fafa605" containerName="horizon-log" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.172572 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="190b54d9-49f3-4d1a-aac5-ab680fafa605" containerName="horizon-log" Oct 03 08:57:17 crc kubenswrapper[4899]: E1003 08:57:17.172585 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a50d8934-3720-4f4e-a702-bf8b43090f52" containerName="init" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.172590 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="a50d8934-3720-4f4e-a702-bf8b43090f52" containerName="init" Oct 03 08:57:17 crc kubenswrapper[4899]: E1003 08:57:17.172599 4899 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="c45fd4cf-daa8-4226-bb75-c55604b5ccb6" containerName="horizon" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.172605 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="c45fd4cf-daa8-4226-bb75-c55604b5ccb6" containerName="horizon" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.172780 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dc2bbb6-802d-4713-935a-353136d48619" containerName="ceilometer-central-agent" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.172790 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="a50d8934-3720-4f4e-a702-bf8b43090f52" containerName="dnsmasq-dns" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.172804 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="3dd81aae-6922-43f5-83a6-c81a06bfedea" containerName="horizon-log" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.172814 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dc2bbb6-802d-4713-935a-353136d48619" containerName="sg-core" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.172825 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="c45fd4cf-daa8-4226-bb75-c55604b5ccb6" containerName="horizon-log" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.172834 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dc2bbb6-802d-4713-935a-353136d48619" containerName="ceilometer-notification-agent" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.172850 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="3dd81aae-6922-43f5-83a6-c81a06bfedea" containerName="horizon" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.172861 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="c45fd4cf-daa8-4226-bb75-c55604b5ccb6" containerName="horizon" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.172871 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dc2bbb6-802d-4713-935a-353136d48619" containerName="proxy-httpd" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.172879 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="190b54d9-49f3-4d1a-aac5-ab680fafa605" containerName="horizon-log" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.172904 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="190b54d9-49f3-4d1a-aac5-ab680fafa605" containerName="horizon" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.175458 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.188015 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.188757 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.199127 4899 scope.go:117] "RemoveContainer" containerID="b2f84a546cd6d33241b3c7fc75baa9b04e3971c497f03dfb13cc00a32d93778d" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.217982 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.386942 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/887fe423-dd24-43d2-b8df-dcdb615a9fda-config-data\") pod \"ceilometer-0\" (UID: \"887fe423-dd24-43d2-b8df-dcdb615a9fda\") " pod="openstack/ceilometer-0" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.387241 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/887fe423-dd24-43d2-b8df-dcdb615a9fda-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"887fe423-dd24-43d2-b8df-dcdb615a9fda\") " pod="openstack/ceilometer-0" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.387424 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/887fe423-dd24-43d2-b8df-dcdb615a9fda-run-httpd\") pod \"ceilometer-0\" (UID: \"887fe423-dd24-43d2-b8df-dcdb615a9fda\") " pod="openstack/ceilometer-0" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.387551 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/887fe423-dd24-43d2-b8df-dcdb615a9fda-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"887fe423-dd24-43d2-b8df-dcdb615a9fda\") " pod="openstack/ceilometer-0" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.387593 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/887fe423-dd24-43d2-b8df-dcdb615a9fda-scripts\") pod \"ceilometer-0\" (UID: \"887fe423-dd24-43d2-b8df-dcdb615a9fda\") " pod="openstack/ceilometer-0" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.387619 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htzld\" (UniqueName: \"kubernetes.io/projected/887fe423-dd24-43d2-b8df-dcdb615a9fda-kube-api-access-htzld\") pod \"ceilometer-0\" (UID: \"887fe423-dd24-43d2-b8df-dcdb615a9fda\") " pod="openstack/ceilometer-0" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.387718 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/887fe423-dd24-43d2-b8df-dcdb615a9fda-log-httpd\") pod \"ceilometer-0\" (UID: \"887fe423-dd24-43d2-b8df-dcdb615a9fda\") " pod="openstack/ceilometer-0" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.489709 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/887fe423-dd24-43d2-b8df-dcdb615a9fda-log-httpd\") pod \"ceilometer-0\" (UID: \"887fe423-dd24-43d2-b8df-dcdb615a9fda\") " pod="openstack/ceilometer-0" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.489806 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/887fe423-dd24-43d2-b8df-dcdb615a9fda-config-data\") pod \"ceilometer-0\" (UID: \"887fe423-dd24-43d2-b8df-dcdb615a9fda\") " pod="openstack/ceilometer-0" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.489838 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/887fe423-dd24-43d2-b8df-dcdb615a9fda-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"887fe423-dd24-43d2-b8df-dcdb615a9fda\") " pod="openstack/ceilometer-0" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.489910 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/887fe423-dd24-43d2-b8df-dcdb615a9fda-run-httpd\") pod \"ceilometer-0\" (UID: \"887fe423-dd24-43d2-b8df-dcdb615a9fda\") " pod="openstack/ceilometer-0" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.489926 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/887fe423-dd24-43d2-b8df-dcdb615a9fda-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"887fe423-dd24-43d2-b8df-dcdb615a9fda\") " pod="openstack/ceilometer-0" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.489952 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htzld\" (UniqueName: \"kubernetes.io/projected/887fe423-dd24-43d2-b8df-dcdb615a9fda-kube-api-access-htzld\") pod \"ceilometer-0\" (UID: \"887fe423-dd24-43d2-b8df-dcdb615a9fda\") " pod="openstack/ceilometer-0" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.489968 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/887fe423-dd24-43d2-b8df-dcdb615a9fda-scripts\") pod \"ceilometer-0\" (UID: \"887fe423-dd24-43d2-b8df-dcdb615a9fda\") " pod="openstack/ceilometer-0" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.490421 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/887fe423-dd24-43d2-b8df-dcdb615a9fda-log-httpd\") pod \"ceilometer-0\" (UID: \"887fe423-dd24-43d2-b8df-dcdb615a9fda\") " pod="openstack/ceilometer-0" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.490742 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/887fe423-dd24-43d2-b8df-dcdb615a9fda-run-httpd\") pod \"ceilometer-0\" (UID: \"887fe423-dd24-43d2-b8df-dcdb615a9fda\") " pod="openstack/ceilometer-0" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.496849 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/887fe423-dd24-43d2-b8df-dcdb615a9fda-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"887fe423-dd24-43d2-b8df-dcdb615a9fda\") " pod="openstack/ceilometer-0" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.497099 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/887fe423-dd24-43d2-b8df-dcdb615a9fda-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"887fe423-dd24-43d2-b8df-dcdb615a9fda\") " pod="openstack/ceilometer-0" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.504998 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/887fe423-dd24-43d2-b8df-dcdb615a9fda-config-data\") pod \"ceilometer-0\" (UID: \"887fe423-dd24-43d2-b8df-dcdb615a9fda\") " pod="openstack/ceilometer-0" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.509376 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htzld\" (UniqueName: \"kubernetes.io/projected/887fe423-dd24-43d2-b8df-dcdb615a9fda-kube-api-access-htzld\") pod \"ceilometer-0\" (UID: \"887fe423-dd24-43d2-b8df-dcdb615a9fda\") " pod="openstack/ceilometer-0" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.509573 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/887fe423-dd24-43d2-b8df-dcdb615a9fda-scripts\") pod \"ceilometer-0\" (UID: \"887fe423-dd24-43d2-b8df-dcdb615a9fda\") " pod="openstack/ceilometer-0" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.613616 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.767630 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.782156 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5dc9cdc98b-cgxhj_a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73/neutron-httpd/2.log" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.782694 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5dc9cdc98b-cgxhj" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.900746 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7zb5\" (UniqueName: \"kubernetes.io/projected/d1b005f0-6dbb-4120-ab69-6ad8cc281434-kube-api-access-w7zb5\") pod \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\" (UID: \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\") " Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.900826 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-combined-ca-bundle\") pod \"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73\" (UID: \"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73\") " Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.900884 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-httpd-config\") pod \"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73\" (UID: \"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73\") " Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.900952 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d1b005f0-6dbb-4120-ab69-6ad8cc281434-etc-machine-id\") pod \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\" (UID: \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\") " Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.900974 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-ovndb-tls-certs\") pod \"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73\" (UID: \"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73\") " Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.900992 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d1b005f0-6dbb-4120-ab69-6ad8cc281434-logs\") pod \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\" (UID: \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\") " Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.901015 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d1b005f0-6dbb-4120-ab69-6ad8cc281434-config-data-custom\") pod \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\" (UID: \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\") " Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.901053 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1b005f0-6dbb-4120-ab69-6ad8cc281434-config-data\") pod \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\" (UID: \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\") " Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.901099 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v6v9k\" (UniqueName: \"kubernetes.io/projected/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-kube-api-access-v6v9k\") pod \"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73\" (UID: \"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73\") " Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.901142 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1b005f0-6dbb-4120-ab69-6ad8cc281434-scripts\") pod \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\" (UID: 
\"d1b005f0-6dbb-4120-ab69-6ad8cc281434\") " Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.901174 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-config\") pod \"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73\" (UID: \"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73\") " Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.901195 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1b005f0-6dbb-4120-ab69-6ad8cc281434-combined-ca-bundle\") pod \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\" (UID: \"d1b005f0-6dbb-4120-ab69-6ad8cc281434\") " Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.901914 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1b005f0-6dbb-4120-ab69-6ad8cc281434-logs" (OuterVolumeSpecName: "logs") pod "d1b005f0-6dbb-4120-ab69-6ad8cc281434" (UID: "d1b005f0-6dbb-4120-ab69-6ad8cc281434"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.902321 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d1b005f0-6dbb-4120-ab69-6ad8cc281434-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "d1b005f0-6dbb-4120-ab69-6ad8cc281434" (UID: "d1b005f0-6dbb-4120-ab69-6ad8cc281434"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.907015 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" (UID: "a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.908120 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1b005f0-6dbb-4120-ab69-6ad8cc281434-kube-api-access-w7zb5" (OuterVolumeSpecName: "kube-api-access-w7zb5") pod "d1b005f0-6dbb-4120-ab69-6ad8cc281434" (UID: "d1b005f0-6dbb-4120-ab69-6ad8cc281434"). InnerVolumeSpecName "kube-api-access-w7zb5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.908222 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-kube-api-access-v6v9k" (OuterVolumeSpecName: "kube-api-access-v6v9k") pod "a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" (UID: "a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73"). InnerVolumeSpecName "kube-api-access-v6v9k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.911059 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1b005f0-6dbb-4120-ab69-6ad8cc281434-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "d1b005f0-6dbb-4120-ab69-6ad8cc281434" (UID: "d1b005f0-6dbb-4120-ab69-6ad8cc281434"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.913317 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1b005f0-6dbb-4120-ab69-6ad8cc281434-scripts" (OuterVolumeSpecName: "scripts") pod "d1b005f0-6dbb-4120-ab69-6ad8cc281434" (UID: "d1b005f0-6dbb-4120-ab69-6ad8cc281434"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.933236 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1b005f0-6dbb-4120-ab69-6ad8cc281434-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d1b005f0-6dbb-4120-ab69-6ad8cc281434" (UID: "d1b005f0-6dbb-4120-ab69-6ad8cc281434"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.958279 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" (UID: "a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.973523 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1b005f0-6dbb-4120-ab69-6ad8cc281434-config-data" (OuterVolumeSpecName: "config-data") pod "d1b005f0-6dbb-4120-ab69-6ad8cc281434" (UID: "d1b005f0-6dbb-4120-ab69-6ad8cc281434"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.981198 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-config" (OuterVolumeSpecName: "config") pod "a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" (UID: "a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:17 crc kubenswrapper[4899]: I1003 08:57:17.991056 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" (UID: "a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.002424 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v6v9k\" (UniqueName: \"kubernetes.io/projected/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-kube-api-access-v6v9k\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.002458 4899 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1b005f0-6dbb-4120-ab69-6ad8cc281434-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.002467 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.002477 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1b005f0-6dbb-4120-ab69-6ad8cc281434-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.002485 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7zb5\" (UniqueName: \"kubernetes.io/projected/d1b005f0-6dbb-4120-ab69-6ad8cc281434-kube-api-access-w7zb5\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.002494 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.002503 4899 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-httpd-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.002512 4899 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d1b005f0-6dbb-4120-ab69-6ad8cc281434-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.002520 4899 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d1b005f0-6dbb-4120-ab69-6ad8cc281434-logs\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.002527 4899 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.002534 4899 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d1b005f0-6dbb-4120-ab69-6ad8cc281434-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.002542 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1b005f0-6dbb-4120-ab69-6ad8cc281434-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.096289 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.096988 4899 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_neutron-5dc9cdc98b-cgxhj_a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73/neutron-httpd/2.log" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.097684 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5dc9cdc98b-cgxhj" event={"ID":"a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73","Type":"ContainerDied","Data":"fbba50d7ae43014968b668332d8762974ceb1eeeb294cf2e7a6ea91b83979406"} Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.097718 4899 scope.go:117] "RemoveContainer" containerID="a23ac8cf8d4767c51a114f69d3bb77e24400da4dd9413c9b91cb078b4a66625f" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.097782 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5dc9cdc98b-cgxhj" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.101728 4899 generic.go:334] "Generic (PLEG): container finished" podID="d1b005f0-6dbb-4120-ab69-6ad8cc281434" containerID="e6a142e414a1c2336da2732a8db570355ce6a6abe5e92d6816dbfcf33a5ebe56" exitCode=0 Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.101817 4899 generic.go:334] "Generic (PLEG): container finished" podID="d1b005f0-6dbb-4120-ab69-6ad8cc281434" containerID="d6f33661ca264553cd5a253e639088556ffdba0c1175424a461da377376ecc26" exitCode=143 Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.101863 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"d1b005f0-6dbb-4120-ab69-6ad8cc281434","Type":"ContainerDied","Data":"e6a142e414a1c2336da2732a8db570355ce6a6abe5e92d6816dbfcf33a5ebe56"} Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.101904 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"d1b005f0-6dbb-4120-ab69-6ad8cc281434","Type":"ContainerDied","Data":"d6f33661ca264553cd5a253e639088556ffdba0c1175424a461da377376ecc26"} Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.101914 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"d1b005f0-6dbb-4120-ab69-6ad8cc281434","Type":"ContainerDied","Data":"b6d94abff7ff1b3db160ea2a2c51207135e38a95d9b2b7019be3622013fe0ad8"} Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.101972 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: W1003 08:57:18.108092 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod887fe423_dd24_43d2_b8df_dcdb615a9fda.slice/crio-d7eaaf291f50e8c8248d5764ccf06be5add2e8200c103d5497c6dde8edf8acfb WatchSource:0}: Error finding container d7eaaf291f50e8c8248d5764ccf06be5add2e8200c103d5497c6dde8edf8acfb: Status 404 returned error can't find the container with id d7eaaf291f50e8c8248d5764ccf06be5add2e8200c103d5497c6dde8edf8acfb Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.128835 4899 scope.go:117] "RemoveContainer" containerID="bbf2fd19a32c7a8f0bf403d2e7a0db3a12683c31ebf1d37d2430fdb7bf52e8ea" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.131695 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5dc9cdc98b-cgxhj"] Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.138921 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-5dc9cdc98b-cgxhj"] Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.149223 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.168362 4899 scope.go:117] "RemoveContainer" containerID="e6a142e414a1c2336da2732a8db570355ce6a6abe5e92d6816dbfcf33a5ebe56" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.172313 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.179553 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 03 08:57:18 crc kubenswrapper[4899]: E1003 08:57:18.180077 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1b005f0-6dbb-4120-ab69-6ad8cc281434" containerName="cinder-api-log" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.180102 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1b005f0-6dbb-4120-ab69-6ad8cc281434" containerName="cinder-api-log" Oct 03 08:57:18 crc kubenswrapper[4899]: E1003 08:57:18.180128 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" containerName="neutron-api" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.180137 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" containerName="neutron-api" Oct 03 08:57:18 crc kubenswrapper[4899]: E1003 08:57:18.180156 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1b005f0-6dbb-4120-ab69-6ad8cc281434" containerName="cinder-api" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.180164 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1b005f0-6dbb-4120-ab69-6ad8cc281434" containerName="cinder-api" Oct 03 08:57:18 crc kubenswrapper[4899]: E1003 08:57:18.180180 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" containerName="neutron-httpd" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.180186 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" containerName="neutron-httpd" Oct 03 08:57:18 crc kubenswrapper[4899]: E1003 08:57:18.180205 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" containerName="neutron-httpd" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.180212 4899 
state_mem.go:107] "Deleted CPUSet assignment" podUID="a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" containerName="neutron-httpd" Oct 03 08:57:18 crc kubenswrapper[4899]: E1003 08:57:18.180227 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" containerName="neutron-httpd" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.180234 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" containerName="neutron-httpd" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.180452 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1b005f0-6dbb-4120-ab69-6ad8cc281434" containerName="cinder-api-log" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.180475 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" containerName="neutron-httpd" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.180494 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1b005f0-6dbb-4120-ab69-6ad8cc281434" containerName="cinder-api" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.180506 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" containerName="neutron-httpd" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.180520 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" containerName="neutron-api" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.180975 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" containerName="neutron-httpd" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.181648 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.185632 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.185924 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.186815 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.189205 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.198866 4899 scope.go:117] "RemoveContainer" containerID="d6f33661ca264553cd5a253e639088556ffdba0c1175424a461da377376ecc26" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.220615 4899 scope.go:117] "RemoveContainer" containerID="e6a142e414a1c2336da2732a8db570355ce6a6abe5e92d6816dbfcf33a5ebe56" Oct 03 08:57:18 crc kubenswrapper[4899]: E1003 08:57:18.221125 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6a142e414a1c2336da2732a8db570355ce6a6abe5e92d6816dbfcf33a5ebe56\": container with ID starting with e6a142e414a1c2336da2732a8db570355ce6a6abe5e92d6816dbfcf33a5ebe56 not found: ID does not exist" containerID="e6a142e414a1c2336da2732a8db570355ce6a6abe5e92d6816dbfcf33a5ebe56" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.221169 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6a142e414a1c2336da2732a8db570355ce6a6abe5e92d6816dbfcf33a5ebe56"} err="failed to get container status \"e6a142e414a1c2336da2732a8db570355ce6a6abe5e92d6816dbfcf33a5ebe56\": rpc error: code = NotFound desc = could not find container \"e6a142e414a1c2336da2732a8db570355ce6a6abe5e92d6816dbfcf33a5ebe56\": container with ID starting with e6a142e414a1c2336da2732a8db570355ce6a6abe5e92d6816dbfcf33a5ebe56 not found: ID does not exist" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.221193 4899 scope.go:117] "RemoveContainer" containerID="d6f33661ca264553cd5a253e639088556ffdba0c1175424a461da377376ecc26" Oct 03 08:57:18 crc kubenswrapper[4899]: E1003 08:57:18.221552 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d6f33661ca264553cd5a253e639088556ffdba0c1175424a461da377376ecc26\": container with ID starting with d6f33661ca264553cd5a253e639088556ffdba0c1175424a461da377376ecc26 not found: ID does not exist" containerID="d6f33661ca264553cd5a253e639088556ffdba0c1175424a461da377376ecc26" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.221587 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6f33661ca264553cd5a253e639088556ffdba0c1175424a461da377376ecc26"} err="failed to get container status \"d6f33661ca264553cd5a253e639088556ffdba0c1175424a461da377376ecc26\": rpc error: code = NotFound desc = could not find container \"d6f33661ca264553cd5a253e639088556ffdba0c1175424a461da377376ecc26\": container with ID starting with d6f33661ca264553cd5a253e639088556ffdba0c1175424a461da377376ecc26 not found: ID does not exist" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.221610 4899 scope.go:117] "RemoveContainer" containerID="e6a142e414a1c2336da2732a8db570355ce6a6abe5e92d6816dbfcf33a5ebe56" Oct 03 08:57:18 
crc kubenswrapper[4899]: I1003 08:57:18.221925 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6a142e414a1c2336da2732a8db570355ce6a6abe5e92d6816dbfcf33a5ebe56"} err="failed to get container status \"e6a142e414a1c2336da2732a8db570355ce6a6abe5e92d6816dbfcf33a5ebe56\": rpc error: code = NotFound desc = could not find container \"e6a142e414a1c2336da2732a8db570355ce6a6abe5e92d6816dbfcf33a5ebe56\": container with ID starting with e6a142e414a1c2336da2732a8db570355ce6a6abe5e92d6816dbfcf33a5ebe56 not found: ID does not exist" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.222007 4899 scope.go:117] "RemoveContainer" containerID="d6f33661ca264553cd5a253e639088556ffdba0c1175424a461da377376ecc26" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.222327 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6f33661ca264553cd5a253e639088556ffdba0c1175424a461da377376ecc26"} err="failed to get container status \"d6f33661ca264553cd5a253e639088556ffdba0c1175424a461da377376ecc26\": rpc error: code = NotFound desc = could not find container \"d6f33661ca264553cd5a253e639088556ffdba0c1175424a461da377376ecc26\": container with ID starting with d6f33661ca264553cd5a253e639088556ffdba0c1175424a461da377376ecc26 not found: ID does not exist" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.310108 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65edf39f-decc-476a-a5f3-b3d2d785ae67-scripts\") pod \"cinder-api-0\" (UID: \"65edf39f-decc-476a-a5f3-b3d2d785ae67\") " pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.310439 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/65edf39f-decc-476a-a5f3-b3d2d785ae67-config-data-custom\") pod \"cinder-api-0\" (UID: \"65edf39f-decc-476a-a5f3-b3d2d785ae67\") " pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.310605 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65edf39f-decc-476a-a5f3-b3d2d785ae67-logs\") pod \"cinder-api-0\" (UID: \"65edf39f-decc-476a-a5f3-b3d2d785ae67\") " pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.310681 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/65edf39f-decc-476a-a5f3-b3d2d785ae67-public-tls-certs\") pod \"cinder-api-0\" (UID: \"65edf39f-decc-476a-a5f3-b3d2d785ae67\") " pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.310760 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69r2d\" (UniqueName: \"kubernetes.io/projected/65edf39f-decc-476a-a5f3-b3d2d785ae67-kube-api-access-69r2d\") pod \"cinder-api-0\" (UID: \"65edf39f-decc-476a-a5f3-b3d2d785ae67\") " pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.310829 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/65edf39f-decc-476a-a5f3-b3d2d785ae67-etc-machine-id\") pod \"cinder-api-0\" (UID: 
\"65edf39f-decc-476a-a5f3-b3d2d785ae67\") " pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.311059 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65edf39f-decc-476a-a5f3-b3d2d785ae67-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"65edf39f-decc-476a-a5f3-b3d2d785ae67\") " pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.311110 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65edf39f-decc-476a-a5f3-b3d2d785ae67-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"65edf39f-decc-476a-a5f3-b3d2d785ae67\") " pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.311143 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65edf39f-decc-476a-a5f3-b3d2d785ae67-config-data\") pod \"cinder-api-0\" (UID: \"65edf39f-decc-476a-a5f3-b3d2d785ae67\") " pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.317144 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-68b787dc9b-6xw72" podUID="987dbede-0277-4b22-b643-36d8b379542f" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.160:9311/healthcheck\": read tcp 10.217.0.2:56974->10.217.0.160:9311: read: connection reset by peer" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.317171 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-68b787dc9b-6xw72" podUID="987dbede-0277-4b22-b643-36d8b379542f" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.160:9311/healthcheck\": read tcp 10.217.0.2:56976->10.217.0.160:9311: read: connection reset by peer" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.412806 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65edf39f-decc-476a-a5f3-b3d2d785ae67-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"65edf39f-decc-476a-a5f3-b3d2d785ae67\") " pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.412859 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65edf39f-decc-476a-a5f3-b3d2d785ae67-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"65edf39f-decc-476a-a5f3-b3d2d785ae67\") " pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.412880 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65edf39f-decc-476a-a5f3-b3d2d785ae67-config-data\") pod \"cinder-api-0\" (UID: \"65edf39f-decc-476a-a5f3-b3d2d785ae67\") " pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.412943 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65edf39f-decc-476a-a5f3-b3d2d785ae67-scripts\") pod \"cinder-api-0\" (UID: \"65edf39f-decc-476a-a5f3-b3d2d785ae67\") " pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.412963 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data-custom\" (UniqueName: \"kubernetes.io/secret/65edf39f-decc-476a-a5f3-b3d2d785ae67-config-data-custom\") pod \"cinder-api-0\" (UID: \"65edf39f-decc-476a-a5f3-b3d2d785ae67\") " pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.413092 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65edf39f-decc-476a-a5f3-b3d2d785ae67-logs\") pod \"cinder-api-0\" (UID: \"65edf39f-decc-476a-a5f3-b3d2d785ae67\") " pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.413115 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/65edf39f-decc-476a-a5f3-b3d2d785ae67-public-tls-certs\") pod \"cinder-api-0\" (UID: \"65edf39f-decc-476a-a5f3-b3d2d785ae67\") " pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.413150 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69r2d\" (UniqueName: \"kubernetes.io/projected/65edf39f-decc-476a-a5f3-b3d2d785ae67-kube-api-access-69r2d\") pod \"cinder-api-0\" (UID: \"65edf39f-decc-476a-a5f3-b3d2d785ae67\") " pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.413177 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/65edf39f-decc-476a-a5f3-b3d2d785ae67-etc-machine-id\") pod \"cinder-api-0\" (UID: \"65edf39f-decc-476a-a5f3-b3d2d785ae67\") " pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.413253 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/65edf39f-decc-476a-a5f3-b3d2d785ae67-etc-machine-id\") pod \"cinder-api-0\" (UID: \"65edf39f-decc-476a-a5f3-b3d2d785ae67\") " pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.413502 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65edf39f-decc-476a-a5f3-b3d2d785ae67-logs\") pod \"cinder-api-0\" (UID: \"65edf39f-decc-476a-a5f3-b3d2d785ae67\") " pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.417275 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/65edf39f-decc-476a-a5f3-b3d2d785ae67-public-tls-certs\") pod \"cinder-api-0\" (UID: \"65edf39f-decc-476a-a5f3-b3d2d785ae67\") " pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.417316 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65edf39f-decc-476a-a5f3-b3d2d785ae67-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"65edf39f-decc-476a-a5f3-b3d2d785ae67\") " pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.418510 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65edf39f-decc-476a-a5f3-b3d2d785ae67-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"65edf39f-decc-476a-a5f3-b3d2d785ae67\") " pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.420337 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/65edf39f-decc-476a-a5f3-b3d2d785ae67-config-data\") pod \"cinder-api-0\" (UID: \"65edf39f-decc-476a-a5f3-b3d2d785ae67\") " pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.423443 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/65edf39f-decc-476a-a5f3-b3d2d785ae67-config-data-custom\") pod \"cinder-api-0\" (UID: \"65edf39f-decc-476a-a5f3-b3d2d785ae67\") " pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.427050 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65edf39f-decc-476a-a5f3-b3d2d785ae67-scripts\") pod \"cinder-api-0\" (UID: \"65edf39f-decc-476a-a5f3-b3d2d785ae67\") " pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.435350 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69r2d\" (UniqueName: \"kubernetes.io/projected/65edf39f-decc-476a-a5f3-b3d2d785ae67-kube-api-access-69r2d\") pod \"cinder-api-0\" (UID: \"65edf39f-decc-476a-a5f3-b3d2d785ae67\") " pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.513365 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.548447 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8dc2bbb6-802d-4713-935a-353136d48619" path="/var/lib/kubelet/pods/8dc2bbb6-802d-4713-935a-353136d48619/volumes" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.549258 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73" path="/var/lib/kubelet/pods/a8bb9f6f-60d4-4900-8177-cf2ff3d8aa73/volumes" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.550298 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1b005f0-6dbb-4120-ab69-6ad8cc281434" path="/var/lib/kubelet/pods/d1b005f0-6dbb-4120-ab69-6ad8cc281434/volumes" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.770056 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-68b787dc9b-6xw72" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.921385 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/987dbede-0277-4b22-b643-36d8b379542f-logs\") pod \"987dbede-0277-4b22-b643-36d8b379542f\" (UID: \"987dbede-0277-4b22-b643-36d8b379542f\") " Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.921441 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/987dbede-0277-4b22-b643-36d8b379542f-config-data-custom\") pod \"987dbede-0277-4b22-b643-36d8b379542f\" (UID: \"987dbede-0277-4b22-b643-36d8b379542f\") " Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.921483 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/987dbede-0277-4b22-b643-36d8b379542f-config-data\") pod \"987dbede-0277-4b22-b643-36d8b379542f\" (UID: \"987dbede-0277-4b22-b643-36d8b379542f\") " Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.921568 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-76w8k\" (UniqueName: \"kubernetes.io/projected/987dbede-0277-4b22-b643-36d8b379542f-kube-api-access-76w8k\") pod \"987dbede-0277-4b22-b643-36d8b379542f\" (UID: \"987dbede-0277-4b22-b643-36d8b379542f\") " Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.921686 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/987dbede-0277-4b22-b643-36d8b379542f-combined-ca-bundle\") pod \"987dbede-0277-4b22-b643-36d8b379542f\" (UID: \"987dbede-0277-4b22-b643-36d8b379542f\") " Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.922076 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/987dbede-0277-4b22-b643-36d8b379542f-logs" (OuterVolumeSpecName: "logs") pod "987dbede-0277-4b22-b643-36d8b379542f" (UID: "987dbede-0277-4b22-b643-36d8b379542f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.925735 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/987dbede-0277-4b22-b643-36d8b379542f-kube-api-access-76w8k" (OuterVolumeSpecName: "kube-api-access-76w8k") pod "987dbede-0277-4b22-b643-36d8b379542f" (UID: "987dbede-0277-4b22-b643-36d8b379542f"). InnerVolumeSpecName "kube-api-access-76w8k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.925860 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/987dbede-0277-4b22-b643-36d8b379542f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "987dbede-0277-4b22-b643-36d8b379542f" (UID: "987dbede-0277-4b22-b643-36d8b379542f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.945571 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/987dbede-0277-4b22-b643-36d8b379542f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "987dbede-0277-4b22-b643-36d8b379542f" (UID: "987dbede-0277-4b22-b643-36d8b379542f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:18 crc kubenswrapper[4899]: I1003 08:57:18.966375 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/987dbede-0277-4b22-b643-36d8b379542f-config-data" (OuterVolumeSpecName: "config-data") pod "987dbede-0277-4b22-b643-36d8b379542f" (UID: "987dbede-0277-4b22-b643-36d8b379542f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:19 crc kubenswrapper[4899]: I1003 08:57:19.024163 4899 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/987dbede-0277-4b22-b643-36d8b379542f-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:19 crc kubenswrapper[4899]: I1003 08:57:19.024473 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/987dbede-0277-4b22-b643-36d8b379542f-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:19 crc kubenswrapper[4899]: I1003 08:57:19.024487 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-76w8k\" (UniqueName: \"kubernetes.io/projected/987dbede-0277-4b22-b643-36d8b379542f-kube-api-access-76w8k\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:19 crc kubenswrapper[4899]: I1003 08:57:19.024498 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/987dbede-0277-4b22-b643-36d8b379542f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:19 crc kubenswrapper[4899]: I1003 08:57:19.024510 4899 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/987dbede-0277-4b22-b643-36d8b379542f-logs\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:19 crc kubenswrapper[4899]: I1003 08:57:19.049676 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 03 08:57:19 crc kubenswrapper[4899]: W1003 08:57:19.052579 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod65edf39f_decc_476a_a5f3_b3d2d785ae67.slice/crio-4ef1bbd791342bfa06190ac93b8f28977d85dd62331ad60e11d027f3378847fa WatchSource:0}: Error finding container 4ef1bbd791342bfa06190ac93b8f28977d85dd62331ad60e11d027f3378847fa: Status 404 returned error can't find the container with id 4ef1bbd791342bfa06190ac93b8f28977d85dd62331ad60e11d027f3378847fa Oct 03 08:57:19 crc kubenswrapper[4899]: I1003 08:57:19.119101 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"65edf39f-decc-476a-a5f3-b3d2d785ae67","Type":"ContainerStarted","Data":"4ef1bbd791342bfa06190ac93b8f28977d85dd62331ad60e11d027f3378847fa"} Oct 03 08:57:19 crc kubenswrapper[4899]: I1003 08:57:19.121373 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"887fe423-dd24-43d2-b8df-dcdb615a9fda","Type":"ContainerStarted","Data":"c99c3d96feae301f4057e1f85506e6467cf8624d73bc05e0b90994ad7ee33ab1"} Oct 03 08:57:19 crc kubenswrapper[4899]: I1003 08:57:19.121416 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"887fe423-dd24-43d2-b8df-dcdb615a9fda","Type":"ContainerStarted","Data":"d7eaaf291f50e8c8248d5764ccf06be5add2e8200c103d5497c6dde8edf8acfb"} Oct 03 08:57:19 crc kubenswrapper[4899]: I1003 08:57:19.123976 4899 generic.go:334] "Generic (PLEG): container finished" 
podID="987dbede-0277-4b22-b643-36d8b379542f" containerID="70d85b4c0dff5a81bb678bf9e95a38d95d816ea7ea6093e2f6e67c1780145eb7" exitCode=0 Oct 03 08:57:19 crc kubenswrapper[4899]: I1003 08:57:19.124033 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-68b787dc9b-6xw72" Oct 03 08:57:19 crc kubenswrapper[4899]: I1003 08:57:19.124092 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68b787dc9b-6xw72" event={"ID":"987dbede-0277-4b22-b643-36d8b379542f","Type":"ContainerDied","Data":"70d85b4c0dff5a81bb678bf9e95a38d95d816ea7ea6093e2f6e67c1780145eb7"} Oct 03 08:57:19 crc kubenswrapper[4899]: I1003 08:57:19.124116 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68b787dc9b-6xw72" event={"ID":"987dbede-0277-4b22-b643-36d8b379542f","Type":"ContainerDied","Data":"f81981cbb19b9eff70a81a591276d1a179aed8c48954f9385e02763611562a52"} Oct 03 08:57:19 crc kubenswrapper[4899]: I1003 08:57:19.124134 4899 scope.go:117] "RemoveContainer" containerID="70d85b4c0dff5a81bb678bf9e95a38d95d816ea7ea6093e2f6e67c1780145eb7" Oct 03 08:57:19 crc kubenswrapper[4899]: I1003 08:57:19.155038 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-68b787dc9b-6xw72"] Oct 03 08:57:19 crc kubenswrapper[4899]: I1003 08:57:19.157181 4899 scope.go:117] "RemoveContainer" containerID="c2a2658ed205303a0b39c25e056177f5e499bc1a5ae456a53e76c8a7a00efc9b" Oct 03 08:57:19 crc kubenswrapper[4899]: I1003 08:57:19.164779 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-68b787dc9b-6xw72"] Oct 03 08:57:19 crc kubenswrapper[4899]: I1003 08:57:19.175301 4899 scope.go:117] "RemoveContainer" containerID="70d85b4c0dff5a81bb678bf9e95a38d95d816ea7ea6093e2f6e67c1780145eb7" Oct 03 08:57:19 crc kubenswrapper[4899]: E1003 08:57:19.175701 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70d85b4c0dff5a81bb678bf9e95a38d95d816ea7ea6093e2f6e67c1780145eb7\": container with ID starting with 70d85b4c0dff5a81bb678bf9e95a38d95d816ea7ea6093e2f6e67c1780145eb7 not found: ID does not exist" containerID="70d85b4c0dff5a81bb678bf9e95a38d95d816ea7ea6093e2f6e67c1780145eb7" Oct 03 08:57:19 crc kubenswrapper[4899]: I1003 08:57:19.175733 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70d85b4c0dff5a81bb678bf9e95a38d95d816ea7ea6093e2f6e67c1780145eb7"} err="failed to get container status \"70d85b4c0dff5a81bb678bf9e95a38d95d816ea7ea6093e2f6e67c1780145eb7\": rpc error: code = NotFound desc = could not find container \"70d85b4c0dff5a81bb678bf9e95a38d95d816ea7ea6093e2f6e67c1780145eb7\": container with ID starting with 70d85b4c0dff5a81bb678bf9e95a38d95d816ea7ea6093e2f6e67c1780145eb7 not found: ID does not exist" Oct 03 08:57:19 crc kubenswrapper[4899]: I1003 08:57:19.175758 4899 scope.go:117] "RemoveContainer" containerID="c2a2658ed205303a0b39c25e056177f5e499bc1a5ae456a53e76c8a7a00efc9b" Oct 03 08:57:19 crc kubenswrapper[4899]: E1003 08:57:19.176043 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2a2658ed205303a0b39c25e056177f5e499bc1a5ae456a53e76c8a7a00efc9b\": container with ID starting with c2a2658ed205303a0b39c25e056177f5e499bc1a5ae456a53e76c8a7a00efc9b not found: ID does not exist" containerID="c2a2658ed205303a0b39c25e056177f5e499bc1a5ae456a53e76c8a7a00efc9b" Oct 03 08:57:19 crc 
kubenswrapper[4899]: I1003 08:57:19.176075 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2a2658ed205303a0b39c25e056177f5e499bc1a5ae456a53e76c8a7a00efc9b"} err="failed to get container status \"c2a2658ed205303a0b39c25e056177f5e499bc1a5ae456a53e76c8a7a00efc9b\": rpc error: code = NotFound desc = could not find container \"c2a2658ed205303a0b39c25e056177f5e499bc1a5ae456a53e76c8a7a00efc9b\": container with ID starting with c2a2658ed205303a0b39c25e056177f5e499bc1a5ae456a53e76c8a7a00efc9b not found: ID does not exist" Oct 03 08:57:20 crc kubenswrapper[4899]: I1003 08:57:20.151114 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"65edf39f-decc-476a-a5f3-b3d2d785ae67","Type":"ContainerStarted","Data":"47f6e246d2e754148148093f62bed81cca44060812973d7221ba8305d6466461"} Oct 03 08:57:20 crc kubenswrapper[4899]: I1003 08:57:20.155055 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"887fe423-dd24-43d2-b8df-dcdb615a9fda","Type":"ContainerStarted","Data":"1c884c460a557ddbe7a2d80e877f9b451067f43ea5100282946aeeaa34749a7c"} Oct 03 08:57:20 crc kubenswrapper[4899]: I1003 08:57:20.537998 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="987dbede-0277-4b22-b643-36d8b379542f" path="/var/lib/kubelet/pods/987dbede-0277-4b22-b643-36d8b379542f/volumes" Oct 03 08:57:21 crc kubenswrapper[4899]: I1003 08:57:21.166273 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"65edf39f-decc-476a-a5f3-b3d2d785ae67","Type":"ContainerStarted","Data":"95e1659b6e7e56a659a3b46f946afed390d5fc504143d93c8723a7fdcf9d4899"} Oct 03 08:57:21 crc kubenswrapper[4899]: I1003 08:57:21.166692 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 03 08:57:21 crc kubenswrapper[4899]: I1003 08:57:21.169993 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"887fe423-dd24-43d2-b8df-dcdb615a9fda","Type":"ContainerStarted","Data":"2040c6b23ed3c5c1bd63344be807abc82e0ff9c097c1f3b785e7dd768f695421"} Oct 03 08:57:21 crc kubenswrapper[4899]: I1003 08:57:21.186004 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.185986295 podStartE2EDuration="3.185986295s" podCreationTimestamp="2025-10-03 08:57:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:57:21.18238769 +0000 UTC m=+1015.289872663" watchObservedRunningTime="2025-10-03 08:57:21.185986295 +0000 UTC m=+1015.293471248" Oct 03 08:57:21 crc kubenswrapper[4899]: I1003 08:57:21.382762 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 03 08:57:21 crc kubenswrapper[4899]: I1003 08:57:21.402810 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" Oct 03 08:57:21 crc kubenswrapper[4899]: I1003 08:57:21.497834 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-29djt"] Oct 03 08:57:21 crc kubenswrapper[4899]: I1003 08:57:21.498291 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-848cf88cfc-29djt" podUID="e59f15bb-5dfd-40ab-aebb-bd9ea515031b" containerName="dnsmasq-dns" 
containerID="cri-o://1e138718686cbc7548d43361a999e66c9fe663434aa67c8d601653483ac7e7c0" gracePeriod=10 Oct 03 08:57:21 crc kubenswrapper[4899]: I1003 08:57:21.627079 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.025761 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-29djt" Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.179415 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-dns-svc\") pod \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\" (UID: \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\") " Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.179476 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-config\") pod \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\" (UID: \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\") " Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.179546 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwt8x\" (UniqueName: \"kubernetes.io/projected/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-kube-api-access-xwt8x\") pod \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\" (UID: \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\") " Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.179665 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-ovsdbserver-sb\") pod \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\" (UID: \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\") " Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.179760 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-ovsdbserver-nb\") pod \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\" (UID: \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\") " Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.179851 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-dns-swift-storage-0\") pod \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\" (UID: \"e59f15bb-5dfd-40ab-aebb-bd9ea515031b\") " Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.215835 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"887fe423-dd24-43d2-b8df-dcdb615a9fda","Type":"ContainerStarted","Data":"851de5c66a36f80c52feca8dd4990d1e41af9a152781dcc082795d2e88df93e9"} Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.216430 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.222014 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-kube-api-access-xwt8x" (OuterVolumeSpecName: "kube-api-access-xwt8x") pod "e59f15bb-5dfd-40ab-aebb-bd9ea515031b" (UID: "e59f15bb-5dfd-40ab-aebb-bd9ea515031b"). InnerVolumeSpecName "kube-api-access-xwt8x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.222814 4899 generic.go:334] "Generic (PLEG): container finished" podID="e59f15bb-5dfd-40ab-aebb-bd9ea515031b" containerID="1e138718686cbc7548d43361a999e66c9fe663434aa67c8d601653483ac7e7c0" exitCode=0 Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.223167 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-29djt" event={"ID":"e59f15bb-5dfd-40ab-aebb-bd9ea515031b","Type":"ContainerDied","Data":"1e138718686cbc7548d43361a999e66c9fe663434aa67c8d601653483ac7e7c0"} Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.223269 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-29djt" event={"ID":"e59f15bb-5dfd-40ab-aebb-bd9ea515031b","Type":"ContainerDied","Data":"e5f156276f357dd26d70d13e21944d6e903d210297f826720affe43b900a08c0"} Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.223358 4899 scope.go:117] "RemoveContainer" containerID="1e138718686cbc7548d43361a999e66c9fe663434aa67c8d601653483ac7e7c0" Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.224564 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-29djt" Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.265990 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.960064582 podStartE2EDuration="5.265970291s" podCreationTimestamp="2025-10-03 08:57:17 +0000 UTC" firstStartedPulling="2025-10-03 08:57:18.111369689 +0000 UTC m=+1012.218854642" lastFinishedPulling="2025-10-03 08:57:21.417275398 +0000 UTC m=+1015.524760351" observedRunningTime="2025-10-03 08:57:22.240230019 +0000 UTC m=+1016.347714982" watchObservedRunningTime="2025-10-03 08:57:22.265970291 +0000 UTC m=+1016.373455244" Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.267522 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e59f15bb-5dfd-40ab-aebb-bd9ea515031b" (UID: "e59f15bb-5dfd-40ab-aebb-bd9ea515031b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.282722 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwt8x\" (UniqueName: \"kubernetes.io/projected/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-kube-api-access-xwt8x\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.282750 4899 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.283191 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e59f15bb-5dfd-40ab-aebb-bd9ea515031b" (UID: "e59f15bb-5dfd-40ab-aebb-bd9ea515031b"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.288704 4899 scope.go:117] "RemoveContainer" containerID="2c836952e96096387b0fe31c66803241fccc236de567814f5b5b065874de933f" Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.298248 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.303604 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "e59f15bb-5dfd-40ab-aebb-bd9ea515031b" (UID: "e59f15bb-5dfd-40ab-aebb-bd9ea515031b"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.305500 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e59f15bb-5dfd-40ab-aebb-bd9ea515031b" (UID: "e59f15bb-5dfd-40ab-aebb-bd9ea515031b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.306118 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-config" (OuterVolumeSpecName: "config") pod "e59f15bb-5dfd-40ab-aebb-bd9ea515031b" (UID: "e59f15bb-5dfd-40ab-aebb-bd9ea515031b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.314227 4899 scope.go:117] "RemoveContainer" containerID="1e138718686cbc7548d43361a999e66c9fe663434aa67c8d601653483ac7e7c0" Oct 03 08:57:22 crc kubenswrapper[4899]: E1003 08:57:22.314839 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e138718686cbc7548d43361a999e66c9fe663434aa67c8d601653483ac7e7c0\": container with ID starting with 1e138718686cbc7548d43361a999e66c9fe663434aa67c8d601653483ac7e7c0 not found: ID does not exist" containerID="1e138718686cbc7548d43361a999e66c9fe663434aa67c8d601653483ac7e7c0" Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.314882 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e138718686cbc7548d43361a999e66c9fe663434aa67c8d601653483ac7e7c0"} err="failed to get container status \"1e138718686cbc7548d43361a999e66c9fe663434aa67c8d601653483ac7e7c0\": rpc error: code = NotFound desc = could not find container \"1e138718686cbc7548d43361a999e66c9fe663434aa67c8d601653483ac7e7c0\": container with ID starting with 1e138718686cbc7548d43361a999e66c9fe663434aa67c8d601653483ac7e7c0 not found: ID does not exist" Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.314931 4899 scope.go:117] "RemoveContainer" containerID="2c836952e96096387b0fe31c66803241fccc236de567814f5b5b065874de933f" Oct 03 08:57:22 crc kubenswrapper[4899]: E1003 08:57:22.315251 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c836952e96096387b0fe31c66803241fccc236de567814f5b5b065874de933f\": container with ID starting with 2c836952e96096387b0fe31c66803241fccc236de567814f5b5b065874de933f not found: ID does not exist" 
containerID="2c836952e96096387b0fe31c66803241fccc236de567814f5b5b065874de933f" Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.315285 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c836952e96096387b0fe31c66803241fccc236de567814f5b5b065874de933f"} err="failed to get container status \"2c836952e96096387b0fe31c66803241fccc236de567814f5b5b065874de933f\": rpc error: code = NotFound desc = could not find container \"2c836952e96096387b0fe31c66803241fccc236de567814f5b5b065874de933f\": container with ID starting with 2c836952e96096387b0fe31c66803241fccc236de567814f5b5b065874de933f not found: ID does not exist" Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.385038 4899 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.385078 4899 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.385090 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.385100 4899 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e59f15bb-5dfd-40ab-aebb-bd9ea515031b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.558036 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-29djt"] Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.567770 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-29djt"] Oct 03 08:57:22 crc kubenswrapper[4899]: I1003 08:57:22.927071 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:57:23 crc kubenswrapper[4899]: I1003 08:57:23.233784 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="cf9015e0-4ee8-45f0-9788-bc7060d4c8e5" containerName="cinder-scheduler" containerID="cri-o://af7b3d6c10c134c1c1e3d50f4f3161c2ea1697ba7b6e944c4159ee43548563ff" gracePeriod=30 Oct 03 08:57:23 crc kubenswrapper[4899]: I1003 08:57:23.234345 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="cf9015e0-4ee8-45f0-9788-bc7060d4c8e5" containerName="probe" containerID="cri-o://7397b89fa20b6c5df66f8651b4d6ad8159410551f8e40b3204e76ad9b9ed4336" gracePeriod=30 Oct 03 08:57:23 crc kubenswrapper[4899]: I1003 08:57:23.345693 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-7f5ccd89b4-5dfm2" Oct 03 08:57:24 crc kubenswrapper[4899]: I1003 08:57:24.253733 4899 generic.go:334] "Generic (PLEG): container finished" podID="cf9015e0-4ee8-45f0-9788-bc7060d4c8e5" containerID="7397b89fa20b6c5df66f8651b4d6ad8159410551f8e40b3204e76ad9b9ed4336" exitCode=0 Oct 03 08:57:24 crc kubenswrapper[4899]: I1003 08:57:24.253787 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" 
event={"ID":"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5","Type":"ContainerDied","Data":"7397b89fa20b6c5df66f8651b4d6ad8159410551f8e40b3204e76ad9b9ed4336"} Oct 03 08:57:24 crc kubenswrapper[4899]: I1003 08:57:24.539043 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e59f15bb-5dfd-40ab-aebb-bd9ea515031b" path="/var/lib/kubelet/pods/e59f15bb-5dfd-40ab-aebb-bd9ea515031b/volumes" Oct 03 08:57:24 crc kubenswrapper[4899]: I1003 08:57:24.859046 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:57:25 crc kubenswrapper[4899]: I1003 08:57:25.167328 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-7f5ccd89b4-5dfm2" Oct 03 08:57:25 crc kubenswrapper[4899]: I1003 08:57:25.226640 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5bcb4b4796-x4jmr"] Oct 03 08:57:25 crc kubenswrapper[4899]: I1003 08:57:25.264535 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5bcb4b4796-x4jmr" podUID="2eba2a41-6a16-4f77-a699-157fb6fa7b3f" containerName="horizon-log" containerID="cri-o://8c3b5ed2a58fe798c1bcc02dbfbe942f01e86c53e3fd06877aa330e11f9e69c3" gracePeriod=30 Oct 03 08:57:25 crc kubenswrapper[4899]: I1003 08:57:25.266248 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5bcb4b4796-x4jmr" podUID="2eba2a41-6a16-4f77-a699-157fb6fa7b3f" containerName="horizon" containerID="cri-o://2af927b92552cb7d46e1ad7165e03c49307e20b3511b616a846aab7a5ac52533" gracePeriod=30 Oct 03 08:57:27 crc kubenswrapper[4899]: I1003 08:57:27.675612 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 08:57:27 crc kubenswrapper[4899]: I1003 08:57:27.800460 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-config-data\") pod \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\" (UID: \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\") " Oct 03 08:57:27 crc kubenswrapper[4899]: I1003 08:57:27.800573 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-etc-machine-id\") pod \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\" (UID: \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\") " Oct 03 08:57:27 crc kubenswrapper[4899]: I1003 08:57:27.800679 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jpqml\" (UniqueName: \"kubernetes.io/projected/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-kube-api-access-jpqml\") pod \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\" (UID: \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\") " Oct 03 08:57:27 crc kubenswrapper[4899]: I1003 08:57:27.800746 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-scripts\") pod \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\" (UID: \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\") " Oct 03 08:57:27 crc kubenswrapper[4899]: I1003 08:57:27.800771 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-combined-ca-bundle\") pod \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\" (UID: 
\"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\") " Oct 03 08:57:27 crc kubenswrapper[4899]: I1003 08:57:27.800868 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-config-data-custom\") pod \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\" (UID: \"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5\") " Oct 03 08:57:27 crc kubenswrapper[4899]: I1003 08:57:27.801862 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "cf9015e0-4ee8-45f0-9788-bc7060d4c8e5" (UID: "cf9015e0-4ee8-45f0-9788-bc7060d4c8e5"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 08:57:27 crc kubenswrapper[4899]: I1003 08:57:27.814289 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-scripts" (OuterVolumeSpecName: "scripts") pod "cf9015e0-4ee8-45f0-9788-bc7060d4c8e5" (UID: "cf9015e0-4ee8-45f0-9788-bc7060d4c8e5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:27 crc kubenswrapper[4899]: I1003 08:57:27.814531 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "cf9015e0-4ee8-45f0-9788-bc7060d4c8e5" (UID: "cf9015e0-4ee8-45f0-9788-bc7060d4c8e5"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:27 crc kubenswrapper[4899]: I1003 08:57:27.814866 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-kube-api-access-jpqml" (OuterVolumeSpecName: "kube-api-access-jpqml") pod "cf9015e0-4ee8-45f0-9788-bc7060d4c8e5" (UID: "cf9015e0-4ee8-45f0-9788-bc7060d4c8e5"). InnerVolumeSpecName "kube-api-access-jpqml". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:57:27 crc kubenswrapper[4899]: I1003 08:57:27.861591 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cf9015e0-4ee8-45f0-9788-bc7060d4c8e5" (UID: "cf9015e0-4ee8-45f0-9788-bc7060d4c8e5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:27 crc kubenswrapper[4899]: I1003 08:57:27.903370 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jpqml\" (UniqueName: \"kubernetes.io/projected/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-kube-api-access-jpqml\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:27 crc kubenswrapper[4899]: I1003 08:57:27.903410 4899 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:27 crc kubenswrapper[4899]: I1003 08:57:27.903424 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:27 crc kubenswrapper[4899]: I1003 08:57:27.903456 4899 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:27 crc kubenswrapper[4899]: I1003 08:57:27.903467 4899 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:27 crc kubenswrapper[4899]: I1003 08:57:27.903594 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-config-data" (OuterVolumeSpecName: "config-data") pod "cf9015e0-4ee8-45f0-9788-bc7060d4c8e5" (UID: "cf9015e0-4ee8-45f0-9788-bc7060d4c8e5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.005531 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.292840 4899 generic.go:334] "Generic (PLEG): container finished" podID="cf9015e0-4ee8-45f0-9788-bc7060d4c8e5" containerID="af7b3d6c10c134c1c1e3d50f4f3161c2ea1697ba7b6e944c4159ee43548563ff" exitCode=0 Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.292879 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5","Type":"ContainerDied","Data":"af7b3d6c10c134c1c1e3d50f4f3161c2ea1697ba7b6e944c4159ee43548563ff"} Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.292916 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"cf9015e0-4ee8-45f0-9788-bc7060d4c8e5","Type":"ContainerDied","Data":"b20bb2156e6df48d5e85d42d4b3280db47d43f063c1cac2b68cb959d4789dfff"} Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.292932 4899 scope.go:117] "RemoveContainer" containerID="7397b89fa20b6c5df66f8651b4d6ad8159410551f8e40b3204e76ad9b9ed4336" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.293031 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.322699 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.329812 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.334448 4899 scope.go:117] "RemoveContainer" containerID="af7b3d6c10c134c1c1e3d50f4f3161c2ea1697ba7b6e944c4159ee43548563ff" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.346995 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 08:57:28 crc kubenswrapper[4899]: E1003 08:57:28.347495 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="987dbede-0277-4b22-b643-36d8b379542f" containerName="barbican-api-log" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.347521 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="987dbede-0277-4b22-b643-36d8b379542f" containerName="barbican-api-log" Oct 03 08:57:28 crc kubenswrapper[4899]: E1003 08:57:28.347534 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e59f15bb-5dfd-40ab-aebb-bd9ea515031b" containerName="dnsmasq-dns" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.347542 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="e59f15bb-5dfd-40ab-aebb-bd9ea515031b" containerName="dnsmasq-dns" Oct 03 08:57:28 crc kubenswrapper[4899]: E1003 08:57:28.347560 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="987dbede-0277-4b22-b643-36d8b379542f" containerName="barbican-api" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.347567 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="987dbede-0277-4b22-b643-36d8b379542f" containerName="barbican-api" Oct 03 08:57:28 crc kubenswrapper[4899]: E1003 08:57:28.347586 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf9015e0-4ee8-45f0-9788-bc7060d4c8e5" containerName="probe" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.347594 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf9015e0-4ee8-45f0-9788-bc7060d4c8e5" containerName="probe" Oct 03 08:57:28 crc kubenswrapper[4899]: E1003 08:57:28.347624 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e59f15bb-5dfd-40ab-aebb-bd9ea515031b" containerName="init" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.347631 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="e59f15bb-5dfd-40ab-aebb-bd9ea515031b" containerName="init" Oct 03 08:57:28 crc kubenswrapper[4899]: E1003 08:57:28.347647 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf9015e0-4ee8-45f0-9788-bc7060d4c8e5" containerName="cinder-scheduler" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.347655 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf9015e0-4ee8-45f0-9788-bc7060d4c8e5" containerName="cinder-scheduler" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.347919 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="987dbede-0277-4b22-b643-36d8b379542f" containerName="barbican-api" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.347942 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf9015e0-4ee8-45f0-9788-bc7060d4c8e5" containerName="probe" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.347957 4899 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="987dbede-0277-4b22-b643-36d8b379542f" containerName="barbican-api-log" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.347970 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="e59f15bb-5dfd-40ab-aebb-bd9ea515031b" containerName="dnsmasq-dns" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.347988 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf9015e0-4ee8-45f0-9788-bc7060d4c8e5" containerName="cinder-scheduler" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.349181 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.352573 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.353801 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.404136 4899 scope.go:117] "RemoveContainer" containerID="7397b89fa20b6c5df66f8651b4d6ad8159410551f8e40b3204e76ad9b9ed4336" Oct 03 08:57:28 crc kubenswrapper[4899]: E1003 08:57:28.404685 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7397b89fa20b6c5df66f8651b4d6ad8159410551f8e40b3204e76ad9b9ed4336\": container with ID starting with 7397b89fa20b6c5df66f8651b4d6ad8159410551f8e40b3204e76ad9b9ed4336 not found: ID does not exist" containerID="7397b89fa20b6c5df66f8651b4d6ad8159410551f8e40b3204e76ad9b9ed4336" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.405054 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7397b89fa20b6c5df66f8651b4d6ad8159410551f8e40b3204e76ad9b9ed4336"} err="failed to get container status \"7397b89fa20b6c5df66f8651b4d6ad8159410551f8e40b3204e76ad9b9ed4336\": rpc error: code = NotFound desc = could not find container \"7397b89fa20b6c5df66f8651b4d6ad8159410551f8e40b3204e76ad9b9ed4336\": container with ID starting with 7397b89fa20b6c5df66f8651b4d6ad8159410551f8e40b3204e76ad9b9ed4336 not found: ID does not exist" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.405073 4899 scope.go:117] "RemoveContainer" containerID="af7b3d6c10c134c1c1e3d50f4f3161c2ea1697ba7b6e944c4159ee43548563ff" Oct 03 08:57:28 crc kubenswrapper[4899]: E1003 08:57:28.405818 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af7b3d6c10c134c1c1e3d50f4f3161c2ea1697ba7b6e944c4159ee43548563ff\": container with ID starting with af7b3d6c10c134c1c1e3d50f4f3161c2ea1697ba7b6e944c4159ee43548563ff not found: ID does not exist" containerID="af7b3d6c10c134c1c1e3d50f4f3161c2ea1697ba7b6e944c4159ee43548563ff" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.405843 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af7b3d6c10c134c1c1e3d50f4f3161c2ea1697ba7b6e944c4159ee43548563ff"} err="failed to get container status \"af7b3d6c10c134c1c1e3d50f4f3161c2ea1697ba7b6e944c4159ee43548563ff\": rpc error: code = NotFound desc = could not find container \"af7b3d6c10c134c1c1e3d50f4f3161c2ea1697ba7b6e944c4159ee43548563ff\": container with ID starting with af7b3d6c10c134c1c1e3d50f4f3161c2ea1697ba7b6e944c4159ee43548563ff not found: ID does not exist" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.513373 4899 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d32bb24-0270-45bb-b242-0aa2517f1cf3-scripts\") pod \"cinder-scheduler-0\" (UID: \"0d32bb24-0270-45bb-b242-0aa2517f1cf3\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.513443 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d32bb24-0270-45bb-b242-0aa2517f1cf3-config-data\") pod \"cinder-scheduler-0\" (UID: \"0d32bb24-0270-45bb-b242-0aa2517f1cf3\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.513476 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0d32bb24-0270-45bb-b242-0aa2517f1cf3-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0d32bb24-0270-45bb-b242-0aa2517f1cf3\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.513609 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bz7vn\" (UniqueName: \"kubernetes.io/projected/0d32bb24-0270-45bb-b242-0aa2517f1cf3-kube-api-access-bz7vn\") pod \"cinder-scheduler-0\" (UID: \"0d32bb24-0270-45bb-b242-0aa2517f1cf3\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.513842 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0d32bb24-0270-45bb-b242-0aa2517f1cf3-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0d32bb24-0270-45bb-b242-0aa2517f1cf3\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.513933 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d32bb24-0270-45bb-b242-0aa2517f1cf3-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0d32bb24-0270-45bb-b242-0aa2517f1cf3\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.538708 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf9015e0-4ee8-45f0-9788-bc7060d4c8e5" path="/var/lib/kubelet/pods/cf9015e0-4ee8-45f0-9788-bc7060d4c8e5/volumes" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.616015 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d32bb24-0270-45bb-b242-0aa2517f1cf3-scripts\") pod \"cinder-scheduler-0\" (UID: \"0d32bb24-0270-45bb-b242-0aa2517f1cf3\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.616071 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d32bb24-0270-45bb-b242-0aa2517f1cf3-config-data\") pod \"cinder-scheduler-0\" (UID: \"0d32bb24-0270-45bb-b242-0aa2517f1cf3\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.616100 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0d32bb24-0270-45bb-b242-0aa2517f1cf3-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0d32bb24-0270-45bb-b242-0aa2517f1cf3\") " 
pod="openstack/cinder-scheduler-0" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.616138 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bz7vn\" (UniqueName: \"kubernetes.io/projected/0d32bb24-0270-45bb-b242-0aa2517f1cf3-kube-api-access-bz7vn\") pod \"cinder-scheduler-0\" (UID: \"0d32bb24-0270-45bb-b242-0aa2517f1cf3\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.616201 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0d32bb24-0270-45bb-b242-0aa2517f1cf3-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0d32bb24-0270-45bb-b242-0aa2517f1cf3\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.616217 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d32bb24-0270-45bb-b242-0aa2517f1cf3-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0d32bb24-0270-45bb-b242-0aa2517f1cf3\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.616316 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0d32bb24-0270-45bb-b242-0aa2517f1cf3-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0d32bb24-0270-45bb-b242-0aa2517f1cf3\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.620092 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d32bb24-0270-45bb-b242-0aa2517f1cf3-scripts\") pod \"cinder-scheduler-0\" (UID: \"0d32bb24-0270-45bb-b242-0aa2517f1cf3\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.620330 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d32bb24-0270-45bb-b242-0aa2517f1cf3-config-data\") pod \"cinder-scheduler-0\" (UID: \"0d32bb24-0270-45bb-b242-0aa2517f1cf3\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.620815 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d32bb24-0270-45bb-b242-0aa2517f1cf3-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0d32bb24-0270-45bb-b242-0aa2517f1cf3\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.622690 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0d32bb24-0270-45bb-b242-0aa2517f1cf3-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0d32bb24-0270-45bb-b242-0aa2517f1cf3\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.632163 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bz7vn\" (UniqueName: \"kubernetes.io/projected/0d32bb24-0270-45bb-b242-0aa2517f1cf3-kube-api-access-bz7vn\") pod \"cinder-scheduler-0\" (UID: \"0d32bb24-0270-45bb-b242-0aa2517f1cf3\") " pod="openstack/cinder-scheduler-0" Oct 03 08:57:28 crc kubenswrapper[4899]: I1003 08:57:28.704922 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 08:57:29 crc kubenswrapper[4899]: I1003 08:57:29.120486 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 08:57:29 crc kubenswrapper[4899]: I1003 08:57:29.306296 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0d32bb24-0270-45bb-b242-0aa2517f1cf3","Type":"ContainerStarted","Data":"43a817ea890c4230a89b27f904c0e37808c971dd1856bc7b3b62478f3f97199b"} Oct 03 08:57:29 crc kubenswrapper[4899]: I1003 08:57:29.311445 4899 generic.go:334] "Generic (PLEG): container finished" podID="2eba2a41-6a16-4f77-a699-157fb6fa7b3f" containerID="2af927b92552cb7d46e1ad7165e03c49307e20b3511b616a846aab7a5ac52533" exitCode=0 Oct 03 08:57:29 crc kubenswrapper[4899]: I1003 08:57:29.311483 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5bcb4b4796-x4jmr" event={"ID":"2eba2a41-6a16-4f77-a699-157fb6fa7b3f","Type":"ContainerDied","Data":"2af927b92552cb7d46e1ad7165e03c49307e20b3511b616a846aab7a5ac52533"} Oct 03 08:57:30 crc kubenswrapper[4899]: I1003 08:57:30.324187 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0d32bb24-0270-45bb-b242-0aa2517f1cf3","Type":"ContainerStarted","Data":"412d11174c0b2fc86090c11ca6096227a87d52cfad32f5d1b8702feb5ba256d5"} Oct 03 08:57:30 crc kubenswrapper[4899]: I1003 08:57:30.324722 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0d32bb24-0270-45bb-b242-0aa2517f1cf3","Type":"ContainerStarted","Data":"539c5f80acbbbff5628e659515750f4992a2cef4393eadd54f03821e9cb9e699"} Oct 03 08:57:30 crc kubenswrapper[4899]: I1003 08:57:30.344228 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=2.344213522 podStartE2EDuration="2.344213522s" podCreationTimestamp="2025-10-03 08:57:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:57:30.342028123 +0000 UTC m=+1024.449513076" watchObservedRunningTime="2025-10-03 08:57:30.344213522 +0000 UTC m=+1024.451698475" Oct 03 08:57:30 crc kubenswrapper[4899]: I1003 08:57:30.561469 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Oct 03 08:57:30 crc kubenswrapper[4899]: I1003 08:57:30.879349 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5bcb4b4796-x4jmr" podUID="2eba2a41-6a16-4f77-a699-157fb6fa7b3f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.147:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.147:8443: connect: connection refused" Oct 03 08:57:31 crc kubenswrapper[4899]: I1003 08:57:31.127922 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-568fd9848b-bw6ch" Oct 03 08:57:31 crc kubenswrapper[4899]: I1003 08:57:31.465955 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-568fd9848b-bw6ch" Oct 03 08:57:31 crc kubenswrapper[4899]: I1003 08:57:31.790195 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-688fdbdf8c-rnx7k" Oct 03 08:57:33 crc kubenswrapper[4899]: I1003 08:57:33.705648 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 03 08:57:34 crc 
kubenswrapper[4899]: I1003 08:57:34.802025 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 03 08:57:34 crc kubenswrapper[4899]: I1003 08:57:34.803416 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 03 08:57:34 crc kubenswrapper[4899]: I1003 08:57:34.805237 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Oct 03 08:57:34 crc kubenswrapper[4899]: I1003 08:57:34.805365 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Oct 03 08:57:34 crc kubenswrapper[4899]: I1003 08:57:34.806812 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-7gftj" Oct 03 08:57:34 crc kubenswrapper[4899]: I1003 08:57:34.810845 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 03 08:57:34 crc kubenswrapper[4899]: I1003 08:57:34.944578 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c6ed6e6-287d-4267-9cfd-b7b554691da8-combined-ca-bundle\") pod \"openstackclient\" (UID: \"1c6ed6e6-287d-4267-9cfd-b7b554691da8\") " pod="openstack/openstackclient" Oct 03 08:57:34 crc kubenswrapper[4899]: I1003 08:57:34.944711 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1c6ed6e6-287d-4267-9cfd-b7b554691da8-openstack-config\") pod \"openstackclient\" (UID: \"1c6ed6e6-287d-4267-9cfd-b7b554691da8\") " pod="openstack/openstackclient" Oct 03 08:57:34 crc kubenswrapper[4899]: I1003 08:57:34.944835 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1c6ed6e6-287d-4267-9cfd-b7b554691da8-openstack-config-secret\") pod \"openstackclient\" (UID: \"1c6ed6e6-287d-4267-9cfd-b7b554691da8\") " pod="openstack/openstackclient" Oct 03 08:57:34 crc kubenswrapper[4899]: I1003 08:57:34.944862 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhg2g\" (UniqueName: \"kubernetes.io/projected/1c6ed6e6-287d-4267-9cfd-b7b554691da8-kube-api-access-mhg2g\") pod \"openstackclient\" (UID: \"1c6ed6e6-287d-4267-9cfd-b7b554691da8\") " pod="openstack/openstackclient" Oct 03 08:57:35 crc kubenswrapper[4899]: I1003 08:57:35.046612 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1c6ed6e6-287d-4267-9cfd-b7b554691da8-openstack-config\") pod \"openstackclient\" (UID: \"1c6ed6e6-287d-4267-9cfd-b7b554691da8\") " pod="openstack/openstackclient" Oct 03 08:57:35 crc kubenswrapper[4899]: I1003 08:57:35.046752 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1c6ed6e6-287d-4267-9cfd-b7b554691da8-openstack-config-secret\") pod \"openstackclient\" (UID: \"1c6ed6e6-287d-4267-9cfd-b7b554691da8\") " pod="openstack/openstackclient" Oct 03 08:57:35 crc kubenswrapper[4899]: I1003 08:57:35.046790 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhg2g\" (UniqueName: 
\"kubernetes.io/projected/1c6ed6e6-287d-4267-9cfd-b7b554691da8-kube-api-access-mhg2g\") pod \"openstackclient\" (UID: \"1c6ed6e6-287d-4267-9cfd-b7b554691da8\") " pod="openstack/openstackclient" Oct 03 08:57:35 crc kubenswrapper[4899]: I1003 08:57:35.046880 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c6ed6e6-287d-4267-9cfd-b7b554691da8-combined-ca-bundle\") pod \"openstackclient\" (UID: \"1c6ed6e6-287d-4267-9cfd-b7b554691da8\") " pod="openstack/openstackclient" Oct 03 08:57:35 crc kubenswrapper[4899]: I1003 08:57:35.047962 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1c6ed6e6-287d-4267-9cfd-b7b554691da8-openstack-config\") pod \"openstackclient\" (UID: \"1c6ed6e6-287d-4267-9cfd-b7b554691da8\") " pod="openstack/openstackclient" Oct 03 08:57:35 crc kubenswrapper[4899]: I1003 08:57:35.052268 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1c6ed6e6-287d-4267-9cfd-b7b554691da8-openstack-config-secret\") pod \"openstackclient\" (UID: \"1c6ed6e6-287d-4267-9cfd-b7b554691da8\") " pod="openstack/openstackclient" Oct 03 08:57:35 crc kubenswrapper[4899]: I1003 08:57:35.056993 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c6ed6e6-287d-4267-9cfd-b7b554691da8-combined-ca-bundle\") pod \"openstackclient\" (UID: \"1c6ed6e6-287d-4267-9cfd-b7b554691da8\") " pod="openstack/openstackclient" Oct 03 08:57:35 crc kubenswrapper[4899]: I1003 08:57:35.066400 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhg2g\" (UniqueName: \"kubernetes.io/projected/1c6ed6e6-287d-4267-9cfd-b7b554691da8-kube-api-access-mhg2g\") pod \"openstackclient\" (UID: \"1c6ed6e6-287d-4267-9cfd-b7b554691da8\") " pod="openstack/openstackclient" Oct 03 08:57:35 crc kubenswrapper[4899]: I1003 08:57:35.132670 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 03 08:57:35 crc kubenswrapper[4899]: I1003 08:57:35.606125 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 03 08:57:36 crc kubenswrapper[4899]: I1003 08:57:36.380400 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"1c6ed6e6-287d-4267-9cfd-b7b554691da8","Type":"ContainerStarted","Data":"76dc473e063ece7e067862c9f5552fbb54b607c2dede9917f9a4b831c36aa62d"} Oct 03 08:57:38 crc kubenswrapper[4899]: I1003 08:57:38.953557 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 03 08:57:39 crc kubenswrapper[4899]: I1003 08:57:39.970975 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:57:39 crc kubenswrapper[4899]: I1003 08:57:39.971246 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="887fe423-dd24-43d2-b8df-dcdb615a9fda" containerName="ceilometer-central-agent" containerID="cri-o://c99c3d96feae301f4057e1f85506e6467cf8624d73bc05e0b90994ad7ee33ab1" gracePeriod=30 Oct 03 08:57:39 crc kubenswrapper[4899]: I1003 08:57:39.971855 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="887fe423-dd24-43d2-b8df-dcdb615a9fda" containerName="sg-core" containerID="cri-o://2040c6b23ed3c5c1bd63344be807abc82e0ff9c097c1f3b785e7dd768f695421" gracePeriod=30 Oct 03 08:57:39 crc kubenswrapper[4899]: I1003 08:57:39.972122 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="887fe423-dd24-43d2-b8df-dcdb615a9fda" containerName="proxy-httpd" containerID="cri-o://851de5c66a36f80c52feca8dd4990d1e41af9a152781dcc082795d2e88df93e9" gracePeriod=30 Oct 03 08:57:39 crc kubenswrapper[4899]: I1003 08:57:39.971948 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="887fe423-dd24-43d2-b8df-dcdb615a9fda" containerName="ceilometer-notification-agent" containerID="cri-o://1c884c460a557ddbe7a2d80e877f9b451067f43ea5100282946aeeaa34749a7c" gracePeriod=30 Oct 03 08:57:39 crc kubenswrapper[4899]: I1003 08:57:39.983415 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="887fe423-dd24-43d2-b8df-dcdb615a9fda" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.165:3000/\": EOF" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.022766 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-54794d7d5c-64vlg"] Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.024728 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.027520 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.028144 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.028332 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.035759 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-54794d7d5c-64vlg"] Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.151569 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/088c667d-5a03-44d1-a2fc-c9de7910e5a8-public-tls-certs\") pod \"swift-proxy-54794d7d5c-64vlg\" (UID: \"088c667d-5a03-44d1-a2fc-c9de7910e5a8\") " pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.151638 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/088c667d-5a03-44d1-a2fc-c9de7910e5a8-config-data\") pod \"swift-proxy-54794d7d5c-64vlg\" (UID: \"088c667d-5a03-44d1-a2fc-c9de7910e5a8\") " pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.151662 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/088c667d-5a03-44d1-a2fc-c9de7910e5a8-log-httpd\") pod \"swift-proxy-54794d7d5c-64vlg\" (UID: \"088c667d-5a03-44d1-a2fc-c9de7910e5a8\") " pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.151705 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/088c667d-5a03-44d1-a2fc-c9de7910e5a8-run-httpd\") pod \"swift-proxy-54794d7d5c-64vlg\" (UID: \"088c667d-5a03-44d1-a2fc-c9de7910e5a8\") " pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.151731 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/088c667d-5a03-44d1-a2fc-c9de7910e5a8-internal-tls-certs\") pod \"swift-proxy-54794d7d5c-64vlg\" (UID: \"088c667d-5a03-44d1-a2fc-c9de7910e5a8\") " pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.151754 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jm9cf\" (UniqueName: \"kubernetes.io/projected/088c667d-5a03-44d1-a2fc-c9de7910e5a8-kube-api-access-jm9cf\") pod \"swift-proxy-54794d7d5c-64vlg\" (UID: \"088c667d-5a03-44d1-a2fc-c9de7910e5a8\") " pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.151827 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/088c667d-5a03-44d1-a2fc-c9de7910e5a8-etc-swift\") pod \"swift-proxy-54794d7d5c-64vlg\" (UID: \"088c667d-5a03-44d1-a2fc-c9de7910e5a8\") " 
pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.151846 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/088c667d-5a03-44d1-a2fc-c9de7910e5a8-combined-ca-bundle\") pod \"swift-proxy-54794d7d5c-64vlg\" (UID: \"088c667d-5a03-44d1-a2fc-c9de7910e5a8\") " pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.253019 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/088c667d-5a03-44d1-a2fc-c9de7910e5a8-etc-swift\") pod \"swift-proxy-54794d7d5c-64vlg\" (UID: \"088c667d-5a03-44d1-a2fc-c9de7910e5a8\") " pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.253067 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/088c667d-5a03-44d1-a2fc-c9de7910e5a8-combined-ca-bundle\") pod \"swift-proxy-54794d7d5c-64vlg\" (UID: \"088c667d-5a03-44d1-a2fc-c9de7910e5a8\") " pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.253106 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/088c667d-5a03-44d1-a2fc-c9de7910e5a8-public-tls-certs\") pod \"swift-proxy-54794d7d5c-64vlg\" (UID: \"088c667d-5a03-44d1-a2fc-c9de7910e5a8\") " pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.253158 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/088c667d-5a03-44d1-a2fc-c9de7910e5a8-config-data\") pod \"swift-proxy-54794d7d5c-64vlg\" (UID: \"088c667d-5a03-44d1-a2fc-c9de7910e5a8\") " pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.253185 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/088c667d-5a03-44d1-a2fc-c9de7910e5a8-log-httpd\") pod \"swift-proxy-54794d7d5c-64vlg\" (UID: \"088c667d-5a03-44d1-a2fc-c9de7910e5a8\") " pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.253227 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/088c667d-5a03-44d1-a2fc-c9de7910e5a8-run-httpd\") pod \"swift-proxy-54794d7d5c-64vlg\" (UID: \"088c667d-5a03-44d1-a2fc-c9de7910e5a8\") " pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.253255 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/088c667d-5a03-44d1-a2fc-c9de7910e5a8-internal-tls-certs\") pod \"swift-proxy-54794d7d5c-64vlg\" (UID: \"088c667d-5a03-44d1-a2fc-c9de7910e5a8\") " pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.253280 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jm9cf\" (UniqueName: \"kubernetes.io/projected/088c667d-5a03-44d1-a2fc-c9de7910e5a8-kube-api-access-jm9cf\") pod \"swift-proxy-54794d7d5c-64vlg\" (UID: \"088c667d-5a03-44d1-a2fc-c9de7910e5a8\") " 
pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.255522 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/088c667d-5a03-44d1-a2fc-c9de7910e5a8-run-httpd\") pod \"swift-proxy-54794d7d5c-64vlg\" (UID: \"088c667d-5a03-44d1-a2fc-c9de7910e5a8\") " pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.255522 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/088c667d-5a03-44d1-a2fc-c9de7910e5a8-log-httpd\") pod \"swift-proxy-54794d7d5c-64vlg\" (UID: \"088c667d-5a03-44d1-a2fc-c9de7910e5a8\") " pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.269181 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/088c667d-5a03-44d1-a2fc-c9de7910e5a8-internal-tls-certs\") pod \"swift-proxy-54794d7d5c-64vlg\" (UID: \"088c667d-5a03-44d1-a2fc-c9de7910e5a8\") " pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.270119 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/088c667d-5a03-44d1-a2fc-c9de7910e5a8-public-tls-certs\") pod \"swift-proxy-54794d7d5c-64vlg\" (UID: \"088c667d-5a03-44d1-a2fc-c9de7910e5a8\") " pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.270211 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/088c667d-5a03-44d1-a2fc-c9de7910e5a8-etc-swift\") pod \"swift-proxy-54794d7d5c-64vlg\" (UID: \"088c667d-5a03-44d1-a2fc-c9de7910e5a8\") " pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.275666 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/088c667d-5a03-44d1-a2fc-c9de7910e5a8-combined-ca-bundle\") pod \"swift-proxy-54794d7d5c-64vlg\" (UID: \"088c667d-5a03-44d1-a2fc-c9de7910e5a8\") " pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.275737 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/088c667d-5a03-44d1-a2fc-c9de7910e5a8-config-data\") pod \"swift-proxy-54794d7d5c-64vlg\" (UID: \"088c667d-5a03-44d1-a2fc-c9de7910e5a8\") " pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.277224 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jm9cf\" (UniqueName: \"kubernetes.io/projected/088c667d-5a03-44d1-a2fc-c9de7910e5a8-kube-api-access-jm9cf\") pod \"swift-proxy-54794d7d5c-64vlg\" (UID: \"088c667d-5a03-44d1-a2fc-c9de7910e5a8\") " pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.344539 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.435291 4899 generic.go:334] "Generic (PLEG): container finished" podID="887fe423-dd24-43d2-b8df-dcdb615a9fda" containerID="851de5c66a36f80c52feca8dd4990d1e41af9a152781dcc082795d2e88df93e9" exitCode=0 Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.435338 4899 generic.go:334] "Generic (PLEG): container finished" podID="887fe423-dd24-43d2-b8df-dcdb615a9fda" containerID="2040c6b23ed3c5c1bd63344be807abc82e0ff9c097c1f3b785e7dd768f695421" exitCode=2 Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.435370 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"887fe423-dd24-43d2-b8df-dcdb615a9fda","Type":"ContainerDied","Data":"851de5c66a36f80c52feca8dd4990d1e41af9a152781dcc082795d2e88df93e9"} Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.435410 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"887fe423-dd24-43d2-b8df-dcdb615a9fda","Type":"ContainerDied","Data":"2040c6b23ed3c5c1bd63344be807abc82e0ff9c097c1f3b785e7dd768f695421"} Oct 03 08:57:40 crc kubenswrapper[4899]: I1003 08:57:40.879783 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5bcb4b4796-x4jmr" podUID="2eba2a41-6a16-4f77-a699-157fb6fa7b3f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.147:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.147:8443: connect: connection refused" Oct 03 08:57:41 crc kubenswrapper[4899]: I1003 08:57:41.276207 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-6mwps"] Oct 03 08:57:41 crc kubenswrapper[4899]: I1003 08:57:41.277725 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-6mwps" Oct 03 08:57:41 crc kubenswrapper[4899]: I1003 08:57:41.289781 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-6mwps"] Oct 03 08:57:41 crc kubenswrapper[4899]: I1003 08:57:41.371578 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-9d7r2"] Oct 03 08:57:41 crc kubenswrapper[4899]: I1003 08:57:41.372927 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-9d7r2" Oct 03 08:57:41 crc kubenswrapper[4899]: I1003 08:57:41.381846 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-9d7r2"] Oct 03 08:57:41 crc kubenswrapper[4899]: I1003 08:57:41.417092 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzh44\" (UniqueName: \"kubernetes.io/projected/1e579b0b-7373-4ab5-b543-3b58fa367f1a-kube-api-access-xzh44\") pod \"nova-api-db-create-6mwps\" (UID: \"1e579b0b-7373-4ab5-b543-3b58fa367f1a\") " pod="openstack/nova-api-db-create-6mwps" Oct 03 08:57:41 crc kubenswrapper[4899]: I1003 08:57:41.447062 4899 generic.go:334] "Generic (PLEG): container finished" podID="887fe423-dd24-43d2-b8df-dcdb615a9fda" containerID="1c884c460a557ddbe7a2d80e877f9b451067f43ea5100282946aeeaa34749a7c" exitCode=0 Oct 03 08:57:41 crc kubenswrapper[4899]: I1003 08:57:41.447097 4899 generic.go:334] "Generic (PLEG): container finished" podID="887fe423-dd24-43d2-b8df-dcdb615a9fda" containerID="c99c3d96feae301f4057e1f85506e6467cf8624d73bc05e0b90994ad7ee33ab1" exitCode=0 Oct 03 08:57:41 crc kubenswrapper[4899]: I1003 08:57:41.447100 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"887fe423-dd24-43d2-b8df-dcdb615a9fda","Type":"ContainerDied","Data":"1c884c460a557ddbe7a2d80e877f9b451067f43ea5100282946aeeaa34749a7c"} Oct 03 08:57:41 crc kubenswrapper[4899]: I1003 08:57:41.447499 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"887fe423-dd24-43d2-b8df-dcdb615a9fda","Type":"ContainerDied","Data":"c99c3d96feae301f4057e1f85506e6467cf8624d73bc05e0b90994ad7ee33ab1"} Oct 03 08:57:41 crc kubenswrapper[4899]: I1003 08:57:41.478603 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-cxdjt"] Oct 03 08:57:41 crc kubenswrapper[4899]: I1003 08:57:41.481730 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-cxdjt" Oct 03 08:57:41 crc kubenswrapper[4899]: I1003 08:57:41.488303 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-cxdjt"] Oct 03 08:57:41 crc kubenswrapper[4899]: I1003 08:57:41.519118 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2n5q4\" (UniqueName: \"kubernetes.io/projected/babb4e0d-c482-4cb4-8140-4bfc34ab2afc-kube-api-access-2n5q4\") pod \"nova-cell0-db-create-9d7r2\" (UID: \"babb4e0d-c482-4cb4-8140-4bfc34ab2afc\") " pod="openstack/nova-cell0-db-create-9d7r2" Oct 03 08:57:41 crc kubenswrapper[4899]: I1003 08:57:41.519273 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzh44\" (UniqueName: \"kubernetes.io/projected/1e579b0b-7373-4ab5-b543-3b58fa367f1a-kube-api-access-xzh44\") pod \"nova-api-db-create-6mwps\" (UID: \"1e579b0b-7373-4ab5-b543-3b58fa367f1a\") " pod="openstack/nova-api-db-create-6mwps" Oct 03 08:57:41 crc kubenswrapper[4899]: I1003 08:57:41.539430 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzh44\" (UniqueName: \"kubernetes.io/projected/1e579b0b-7373-4ab5-b543-3b58fa367f1a-kube-api-access-xzh44\") pod \"nova-api-db-create-6mwps\" (UID: \"1e579b0b-7373-4ab5-b543-3b58fa367f1a\") " pod="openstack/nova-api-db-create-6mwps" Oct 03 08:57:41 crc kubenswrapper[4899]: I1003 08:57:41.601586 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-6mwps" Oct 03 08:57:41 crc kubenswrapper[4899]: I1003 08:57:41.621499 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l82h4\" (UniqueName: \"kubernetes.io/projected/d14aaa2d-9ea7-4886-8464-effc840164f4-kube-api-access-l82h4\") pod \"nova-cell1-db-create-cxdjt\" (UID: \"d14aaa2d-9ea7-4886-8464-effc840164f4\") " pod="openstack/nova-cell1-db-create-cxdjt" Oct 03 08:57:41 crc kubenswrapper[4899]: I1003 08:57:41.621599 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2n5q4\" (UniqueName: \"kubernetes.io/projected/babb4e0d-c482-4cb4-8140-4bfc34ab2afc-kube-api-access-2n5q4\") pod \"nova-cell0-db-create-9d7r2\" (UID: \"babb4e0d-c482-4cb4-8140-4bfc34ab2afc\") " pod="openstack/nova-cell0-db-create-9d7r2" Oct 03 08:57:41 crc kubenswrapper[4899]: I1003 08:57:41.643163 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2n5q4\" (UniqueName: \"kubernetes.io/projected/babb4e0d-c482-4cb4-8140-4bfc34ab2afc-kube-api-access-2n5q4\") pod \"nova-cell0-db-create-9d7r2\" (UID: \"babb4e0d-c482-4cb4-8140-4bfc34ab2afc\") " pod="openstack/nova-cell0-db-create-9d7r2" Oct 03 08:57:41 crc kubenswrapper[4899]: I1003 08:57:41.724184 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l82h4\" (UniqueName: \"kubernetes.io/projected/d14aaa2d-9ea7-4886-8464-effc840164f4-kube-api-access-l82h4\") pod \"nova-cell1-db-create-cxdjt\" (UID: \"d14aaa2d-9ea7-4886-8464-effc840164f4\") " pod="openstack/nova-cell1-db-create-cxdjt" Oct 03 08:57:41 crc kubenswrapper[4899]: I1003 08:57:41.744472 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-9d7r2" Oct 03 08:57:41 crc kubenswrapper[4899]: I1003 08:57:41.750161 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l82h4\" (UniqueName: \"kubernetes.io/projected/d14aaa2d-9ea7-4886-8464-effc840164f4-kube-api-access-l82h4\") pod \"nova-cell1-db-create-cxdjt\" (UID: \"d14aaa2d-9ea7-4886-8464-effc840164f4\") " pod="openstack/nova-cell1-db-create-cxdjt" Oct 03 08:57:41 crc kubenswrapper[4899]: I1003 08:57:41.803449 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-cxdjt" Oct 03 08:57:45 crc kubenswrapper[4899]: I1003 08:57:45.127651 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 08:57:45 crc kubenswrapper[4899]: I1003 08:57:45.128231 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="405fb02a-9786-4dc3-a4a6-bf885f7a75fe" containerName="glance-log" containerID="cri-o://b3bcfd680d21dffd9a651646bb698ef6ce38b1311bd4b55221c2a36cfb915611" gracePeriod=30 Oct 03 08:57:45 crc kubenswrapper[4899]: I1003 08:57:45.128348 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="405fb02a-9786-4dc3-a4a6-bf885f7a75fe" containerName="glance-httpd" containerID="cri-o://acbded3a0caa4a0ee411e002a31288813cc64cc755b63cb8ad83dfd1ea64b4fc" gracePeriod=30 Oct 03 08:57:45 crc kubenswrapper[4899]: I1003 08:57:45.485622 4899 generic.go:334] "Generic (PLEG): container finished" podID="405fb02a-9786-4dc3-a4a6-bf885f7a75fe" containerID="b3bcfd680d21dffd9a651646bb698ef6ce38b1311bd4b55221c2a36cfb915611" exitCode=143 Oct 03 08:57:45 crc kubenswrapper[4899]: I1003 08:57:45.485700 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"405fb02a-9786-4dc3-a4a6-bf885f7a75fe","Type":"ContainerDied","Data":"b3bcfd680d21dffd9a651646bb698ef6ce38b1311bd4b55221c2a36cfb915611"} Oct 03 08:57:46 crc kubenswrapper[4899]: I1003 08:57:46.647369 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 08:57:46 crc kubenswrapper[4899]: I1003 08:57:46.647998 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="894a907e-c584-4671-9ba8-e4b1df804b5b" containerName="glance-log" containerID="cri-o://32a3e5375a1223b70531d108e09150a9f5890189799bfa1dc1526e56b37f19b1" gracePeriod=30 Oct 03 08:57:46 crc kubenswrapper[4899]: I1003 08:57:46.648183 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="894a907e-c584-4671-9ba8-e4b1df804b5b" containerName="glance-httpd" containerID="cri-o://d8e9a0d728d0a78bc45d8f5dc222e918da9039d54b35fd14862f4518b06a1ebc" gracePeriod=30 Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.090111 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.135495 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/887fe423-dd24-43d2-b8df-dcdb615a9fda-sg-core-conf-yaml\") pod \"887fe423-dd24-43d2-b8df-dcdb615a9fda\" (UID: \"887fe423-dd24-43d2-b8df-dcdb615a9fda\") " Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.135539 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htzld\" (UniqueName: \"kubernetes.io/projected/887fe423-dd24-43d2-b8df-dcdb615a9fda-kube-api-access-htzld\") pod \"887fe423-dd24-43d2-b8df-dcdb615a9fda\" (UID: \"887fe423-dd24-43d2-b8df-dcdb615a9fda\") " Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.135606 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/887fe423-dd24-43d2-b8df-dcdb615a9fda-scripts\") pod \"887fe423-dd24-43d2-b8df-dcdb615a9fda\" (UID: \"887fe423-dd24-43d2-b8df-dcdb615a9fda\") " Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.135663 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/887fe423-dd24-43d2-b8df-dcdb615a9fda-combined-ca-bundle\") pod \"887fe423-dd24-43d2-b8df-dcdb615a9fda\" (UID: \"887fe423-dd24-43d2-b8df-dcdb615a9fda\") " Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.135684 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/887fe423-dd24-43d2-b8df-dcdb615a9fda-config-data\") pod \"887fe423-dd24-43d2-b8df-dcdb615a9fda\" (UID: \"887fe423-dd24-43d2-b8df-dcdb615a9fda\") " Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.135754 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/887fe423-dd24-43d2-b8df-dcdb615a9fda-run-httpd\") pod \"887fe423-dd24-43d2-b8df-dcdb615a9fda\" (UID: \"887fe423-dd24-43d2-b8df-dcdb615a9fda\") " Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.135810 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/887fe423-dd24-43d2-b8df-dcdb615a9fda-log-httpd\") pod \"887fe423-dd24-43d2-b8df-dcdb615a9fda\" (UID: \"887fe423-dd24-43d2-b8df-dcdb615a9fda\") " Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.137140 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/887fe423-dd24-43d2-b8df-dcdb615a9fda-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "887fe423-dd24-43d2-b8df-dcdb615a9fda" (UID: "887fe423-dd24-43d2-b8df-dcdb615a9fda"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.145123 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/887fe423-dd24-43d2-b8df-dcdb615a9fda-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "887fe423-dd24-43d2-b8df-dcdb615a9fda" (UID: "887fe423-dd24-43d2-b8df-dcdb615a9fda"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.146479 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/887fe423-dd24-43d2-b8df-dcdb615a9fda-kube-api-access-htzld" (OuterVolumeSpecName: "kube-api-access-htzld") pod "887fe423-dd24-43d2-b8df-dcdb615a9fda" (UID: "887fe423-dd24-43d2-b8df-dcdb615a9fda"). InnerVolumeSpecName "kube-api-access-htzld". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.152021 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/887fe423-dd24-43d2-b8df-dcdb615a9fda-scripts" (OuterVolumeSpecName: "scripts") pod "887fe423-dd24-43d2-b8df-dcdb615a9fda" (UID: "887fe423-dd24-43d2-b8df-dcdb615a9fda"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.171048 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/887fe423-dd24-43d2-b8df-dcdb615a9fda-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "887fe423-dd24-43d2-b8df-dcdb615a9fda" (UID: "887fe423-dd24-43d2-b8df-dcdb615a9fda"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.229138 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/887fe423-dd24-43d2-b8df-dcdb615a9fda-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "887fe423-dd24-43d2-b8df-dcdb615a9fda" (UID: "887fe423-dd24-43d2-b8df-dcdb615a9fda"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.238550 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/887fe423-dd24-43d2-b8df-dcdb615a9fda-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.238566 4899 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/887fe423-dd24-43d2-b8df-dcdb615a9fda-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.238575 4899 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/887fe423-dd24-43d2-b8df-dcdb615a9fda-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.238585 4899 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/887fe423-dd24-43d2-b8df-dcdb615a9fda-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.238593 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htzld\" (UniqueName: \"kubernetes.io/projected/887fe423-dd24-43d2-b8df-dcdb615a9fda-kube-api-access-htzld\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.238601 4899 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/887fe423-dd24-43d2-b8df-dcdb615a9fda-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.255503 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/887fe423-dd24-43d2-b8df-dcdb615a9fda-config-data" (OuterVolumeSpecName: "config-data") pod "887fe423-dd24-43d2-b8df-dcdb615a9fda" (UID: "887fe423-dd24-43d2-b8df-dcdb615a9fda"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.339905 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/887fe423-dd24-43d2-b8df-dcdb615a9fda-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.380938 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-54794d7d5c-64vlg"] Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.435422 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-cxdjt"] Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.501880 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-9d7r2"] Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.515579 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-6mwps"] Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.518621 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"1c6ed6e6-287d-4267-9cfd-b7b554691da8","Type":"ContainerStarted","Data":"4f8700e1f6e801f7a06052d07b1f8e898c7c12c90d9f10ab2e884587fcf90c5c"} Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.523068 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-54794d7d5c-64vlg" event={"ID":"088c667d-5a03-44d1-a2fc-c9de7910e5a8","Type":"ContainerStarted","Data":"626cb454da437691026d142e74a58f4abab7a52204065bbb49c5d585d34c3c01"} Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.527168 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"887fe423-dd24-43d2-b8df-dcdb615a9fda","Type":"ContainerDied","Data":"d7eaaf291f50e8c8248d5764ccf06be5add2e8200c103d5497c6dde8edf8acfb"} Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.527217 4899 scope.go:117] "RemoveContainer" containerID="851de5c66a36f80c52feca8dd4990d1e41af9a152781dcc082795d2e88df93e9" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.527183 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.545688 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-cxdjt" event={"ID":"d14aaa2d-9ea7-4886-8464-effc840164f4","Type":"ContainerStarted","Data":"5025e15b8bd8f20e317e5ef6a5d0aa26cac5bed33551c34811f96ab67227fb71"} Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.552792 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.354009938 podStartE2EDuration="13.552772073s" podCreationTimestamp="2025-10-03 08:57:34 +0000 UTC" firstStartedPulling="2025-10-03 08:57:35.602828919 +0000 UTC m=+1029.710313872" lastFinishedPulling="2025-10-03 08:57:46.801591054 +0000 UTC m=+1040.909076007" observedRunningTime="2025-10-03 08:57:47.53558724 +0000 UTC m=+1041.643072193" watchObservedRunningTime="2025-10-03 08:57:47.552772073 +0000 UTC m=+1041.660257026" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.556577 4899 generic.go:334] "Generic (PLEG): container finished" podID="894a907e-c584-4671-9ba8-e4b1df804b5b" containerID="32a3e5375a1223b70531d108e09150a9f5890189799bfa1dc1526e56b37f19b1" exitCode=143 Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.556631 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"894a907e-c584-4671-9ba8-e4b1df804b5b","Type":"ContainerDied","Data":"32a3e5375a1223b70531d108e09150a9f5890189799bfa1dc1526e56b37f19b1"} Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.610571 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.626102 4899 scope.go:117] "RemoveContainer" containerID="2040c6b23ed3c5c1bd63344be807abc82e0ff9c097c1f3b785e7dd768f695421" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.638268 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.665842 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:57:47 crc kubenswrapper[4899]: E1003 08:57:47.667376 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="887fe423-dd24-43d2-b8df-dcdb615a9fda" containerName="ceilometer-notification-agent" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.667395 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="887fe423-dd24-43d2-b8df-dcdb615a9fda" containerName="ceilometer-notification-agent" Oct 03 08:57:47 crc kubenswrapper[4899]: E1003 08:57:47.667444 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="887fe423-dd24-43d2-b8df-dcdb615a9fda" containerName="proxy-httpd" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.667451 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="887fe423-dd24-43d2-b8df-dcdb615a9fda" containerName="proxy-httpd" Oct 03 08:57:47 crc kubenswrapper[4899]: E1003 08:57:47.667473 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="887fe423-dd24-43d2-b8df-dcdb615a9fda" containerName="ceilometer-central-agent" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.667479 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="887fe423-dd24-43d2-b8df-dcdb615a9fda" containerName="ceilometer-central-agent" Oct 03 08:57:47 crc kubenswrapper[4899]: E1003 08:57:47.667499 4899 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="887fe423-dd24-43d2-b8df-dcdb615a9fda" containerName="sg-core" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.667505 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="887fe423-dd24-43d2-b8df-dcdb615a9fda" containerName="sg-core" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.667950 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="887fe423-dd24-43d2-b8df-dcdb615a9fda" containerName="sg-core" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.667978 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="887fe423-dd24-43d2-b8df-dcdb615a9fda" containerName="ceilometer-central-agent" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.668000 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="887fe423-dd24-43d2-b8df-dcdb615a9fda" containerName="ceilometer-notification-agent" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.668010 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="887fe423-dd24-43d2-b8df-dcdb615a9fda" containerName="proxy-httpd" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.671154 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.675981 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.676281 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.694053 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.732090 4899 scope.go:117] "RemoveContainer" containerID="1c884c460a557ddbe7a2d80e877f9b451067f43ea5100282946aeeaa34749a7c" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.754178 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7b505b6-c3ce-4052-b99a-1c548bc69df6-scripts\") pod \"ceilometer-0\" (UID: \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\") " pod="openstack/ceilometer-0" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.754235 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtdwc\" (UniqueName: \"kubernetes.io/projected/a7b505b6-c3ce-4052-b99a-1c548bc69df6-kube-api-access-vtdwc\") pod \"ceilometer-0\" (UID: \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\") " pod="openstack/ceilometer-0" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.754328 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7b505b6-c3ce-4052-b99a-1c548bc69df6-run-httpd\") pod \"ceilometer-0\" (UID: \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\") " pod="openstack/ceilometer-0" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.754372 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7b505b6-c3ce-4052-b99a-1c548bc69df6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\") " pod="openstack/ceilometer-0" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.754410 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7b505b6-c3ce-4052-b99a-1c548bc69df6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\") " pod="openstack/ceilometer-0" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.754438 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7b505b6-c3ce-4052-b99a-1c548bc69df6-config-data\") pod \"ceilometer-0\" (UID: \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\") " pod="openstack/ceilometer-0" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.754848 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7b505b6-c3ce-4052-b99a-1c548bc69df6-log-httpd\") pod \"ceilometer-0\" (UID: \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\") " pod="openstack/ceilometer-0" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.857342 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7b505b6-c3ce-4052-b99a-1c548bc69df6-log-httpd\") pod \"ceilometer-0\" (UID: \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\") " pod="openstack/ceilometer-0" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.857431 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7b505b6-c3ce-4052-b99a-1c548bc69df6-scripts\") pod \"ceilometer-0\" (UID: \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\") " pod="openstack/ceilometer-0" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.857465 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtdwc\" (UniqueName: \"kubernetes.io/projected/a7b505b6-c3ce-4052-b99a-1c548bc69df6-kube-api-access-vtdwc\") pod \"ceilometer-0\" (UID: \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\") " pod="openstack/ceilometer-0" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.857516 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7b505b6-c3ce-4052-b99a-1c548bc69df6-run-httpd\") pod \"ceilometer-0\" (UID: \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\") " pod="openstack/ceilometer-0" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.857550 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7b505b6-c3ce-4052-b99a-1c548bc69df6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\") " pod="openstack/ceilometer-0" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.857582 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7b505b6-c3ce-4052-b99a-1c548bc69df6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\") " pod="openstack/ceilometer-0" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.857608 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7b505b6-c3ce-4052-b99a-1c548bc69df6-config-data\") pod \"ceilometer-0\" (UID: \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\") " pod="openstack/ceilometer-0" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.858849 4899 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7b505b6-c3ce-4052-b99a-1c548bc69df6-log-httpd\") pod \"ceilometer-0\" (UID: \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\") " pod="openstack/ceilometer-0" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.859303 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7b505b6-c3ce-4052-b99a-1c548bc69df6-run-httpd\") pod \"ceilometer-0\" (UID: \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\") " pod="openstack/ceilometer-0" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.862080 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7b505b6-c3ce-4052-b99a-1c548bc69df6-scripts\") pod \"ceilometer-0\" (UID: \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\") " pod="openstack/ceilometer-0" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.862293 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7b505b6-c3ce-4052-b99a-1c548bc69df6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\") " pod="openstack/ceilometer-0" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.862783 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7b505b6-c3ce-4052-b99a-1c548bc69df6-config-data\") pod \"ceilometer-0\" (UID: \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\") " pod="openstack/ceilometer-0" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.864015 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7b505b6-c3ce-4052-b99a-1c548bc69df6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\") " pod="openstack/ceilometer-0" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.875481 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtdwc\" (UniqueName: \"kubernetes.io/projected/a7b505b6-c3ce-4052-b99a-1c548bc69df6-kube-api-access-vtdwc\") pod \"ceilometer-0\" (UID: \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\") " pod="openstack/ceilometer-0" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.957239 4899 scope.go:117] "RemoveContainer" containerID="c99c3d96feae301f4057e1f85506e6467cf8624d73bc05e0b90994ad7ee33ab1" Oct 03 08:57:47 crc kubenswrapper[4899]: I1003 08:57:47.980374 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:57:48 crc kubenswrapper[4899]: I1003 08:57:48.512632 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:57:48 crc kubenswrapper[4899]: I1003 08:57:48.554881 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="887fe423-dd24-43d2-b8df-dcdb615a9fda" path="/var/lib/kubelet/pods/887fe423-dd24-43d2-b8df-dcdb615a9fda/volumes" Oct 03 08:57:48 crc kubenswrapper[4899]: I1003 08:57:48.569849 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-54794d7d5c-64vlg" event={"ID":"088c667d-5a03-44d1-a2fc-c9de7910e5a8","Type":"ContainerStarted","Data":"bb11add0a755d438007636400dc53d13b8bc5d6f7778c9274df679c0979b99f8"} Oct 03 08:57:48 crc kubenswrapper[4899]: I1003 08:57:48.569934 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-54794d7d5c-64vlg" event={"ID":"088c667d-5a03-44d1-a2fc-c9de7910e5a8","Type":"ContainerStarted","Data":"0f3cf24acabfc10eed9e67e01e22ec8ff2d944530c120b40d4a3a82d2b32cd3a"} Oct 03 08:57:48 crc kubenswrapper[4899]: I1003 08:57:48.571071 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:48 crc kubenswrapper[4899]: I1003 08:57:48.571100 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:48 crc kubenswrapper[4899]: I1003 08:57:48.573481 4899 generic.go:334] "Generic (PLEG): container finished" podID="babb4e0d-c482-4cb4-8140-4bfc34ab2afc" containerID="41c347ba33df1ded9bfc575bc426f2f75d559d4b6655452d8ae4a029bccdd8d0" exitCode=0 Oct 03 08:57:48 crc kubenswrapper[4899]: I1003 08:57:48.573531 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-9d7r2" event={"ID":"babb4e0d-c482-4cb4-8140-4bfc34ab2afc","Type":"ContainerDied","Data":"41c347ba33df1ded9bfc575bc426f2f75d559d4b6655452d8ae4a029bccdd8d0"} Oct 03 08:57:48 crc kubenswrapper[4899]: I1003 08:57:48.573549 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-9d7r2" event={"ID":"babb4e0d-c482-4cb4-8140-4bfc34ab2afc","Type":"ContainerStarted","Data":"cab1b22429c04bda3e38c3e084e4fb200769552bc93ccefc9b52da9f23462c0a"} Oct 03 08:57:48 crc kubenswrapper[4899]: I1003 08:57:48.584056 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7b505b6-c3ce-4052-b99a-1c548bc69df6","Type":"ContainerStarted","Data":"a8363a4ae38310411e2ded9b90a71c6a66d26d835db378333b7b044f9e408682"} Oct 03 08:57:48 crc kubenswrapper[4899]: I1003 08:57:48.597060 4899 generic.go:334] "Generic (PLEG): container finished" podID="d14aaa2d-9ea7-4886-8464-effc840164f4" containerID="a91319d94bb8ccc1f351de2325d4f9530edc814ec3530255ed286e6449e5aae7" exitCode=0 Oct 03 08:57:48 crc kubenswrapper[4899]: I1003 08:57:48.597185 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-cxdjt" event={"ID":"d14aaa2d-9ea7-4886-8464-effc840164f4","Type":"ContainerDied","Data":"a91319d94bb8ccc1f351de2325d4f9530edc814ec3530255ed286e6449e5aae7"} Oct 03 08:57:48 crc kubenswrapper[4899]: I1003 08:57:48.599581 4899 generic.go:334] "Generic (PLEG): container finished" podID="1e579b0b-7373-4ab5-b543-3b58fa367f1a" containerID="a4476fbf54ecddbb11f66691aefd0fb12c79aa5f593225e5bc908a3e1fe6ef5a" exitCode=0 Oct 03 08:57:48 crc kubenswrapper[4899]: I1003 08:57:48.599655 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-api-db-create-6mwps" event={"ID":"1e579b0b-7373-4ab5-b543-3b58fa367f1a","Type":"ContainerDied","Data":"a4476fbf54ecddbb11f66691aefd0fb12c79aa5f593225e5bc908a3e1fe6ef5a"} Oct 03 08:57:48 crc kubenswrapper[4899]: I1003 08:57:48.599733 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-6mwps" event={"ID":"1e579b0b-7373-4ab5-b543-3b58fa367f1a","Type":"ContainerStarted","Data":"f5c34f2e3432efb89fc689801af8274d1067f3dc37cbc413fb7ab816c23f7296"} Oct 03 08:57:48 crc kubenswrapper[4899]: I1003 08:57:48.605806 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-54794d7d5c-64vlg" podStartSLOduration=9.605787019 podStartE2EDuration="9.605787019s" podCreationTimestamp="2025-10-03 08:57:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:57:48.587644966 +0000 UTC m=+1042.695129909" watchObservedRunningTime="2025-10-03 08:57:48.605787019 +0000 UTC m=+1042.713271972" Oct 03 08:57:48 crc kubenswrapper[4899]: I1003 08:57:48.622620 4899 generic.go:334] "Generic (PLEG): container finished" podID="405fb02a-9786-4dc3-a4a6-bf885f7a75fe" containerID="acbded3a0caa4a0ee411e002a31288813cc64cc755b63cb8ad83dfd1ea64b4fc" exitCode=0 Oct 03 08:57:48 crc kubenswrapper[4899]: I1003 08:57:48.622685 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"405fb02a-9786-4dc3-a4a6-bf885f7a75fe","Type":"ContainerDied","Data":"acbded3a0caa4a0ee411e002a31288813cc64cc755b63cb8ad83dfd1ea64b4fc"} Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.434424 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.492280 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-combined-ca-bundle\") pod \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.492351 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.492414 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-public-tls-certs\") pod \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.492483 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-config-data\") pod \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.492523 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-logs\") pod \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " Oct 03 
08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.492615 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kbv7d\" (UniqueName: \"kubernetes.io/projected/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-kube-api-access-kbv7d\") pod \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.492647 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-httpd-run\") pod \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.492725 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-scripts\") pod \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\" (UID: \"405fb02a-9786-4dc3-a4a6-bf885f7a75fe\") " Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.497752 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "405fb02a-9786-4dc3-a4a6-bf885f7a75fe" (UID: "405fb02a-9786-4dc3-a4a6-bf885f7a75fe"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.498222 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-logs" (OuterVolumeSpecName: "logs") pod "405fb02a-9786-4dc3-a4a6-bf885f7a75fe" (UID: "405fb02a-9786-4dc3-a4a6-bf885f7a75fe"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.510523 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-scripts" (OuterVolumeSpecName: "scripts") pod "405fb02a-9786-4dc3-a4a6-bf885f7a75fe" (UID: "405fb02a-9786-4dc3-a4a6-bf885f7a75fe"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.511716 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "405fb02a-9786-4dc3-a4a6-bf885f7a75fe" (UID: "405fb02a-9786-4dc3-a4a6-bf885f7a75fe"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.534316 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-kube-api-access-kbv7d" (OuterVolumeSpecName: "kube-api-access-kbv7d") pod "405fb02a-9786-4dc3-a4a6-bf885f7a75fe" (UID: "405fb02a-9786-4dc3-a4a6-bf885f7a75fe"). InnerVolumeSpecName "kube-api-access-kbv7d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.595812 4899 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.595849 4899 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.595858 4899 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-logs\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.595866 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kbv7d\" (UniqueName: \"kubernetes.io/projected/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-kube-api-access-kbv7d\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.595874 4899 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.603016 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "405fb02a-9786-4dc3-a4a6-bf885f7a75fe" (UID: "405fb02a-9786-4dc3-a4a6-bf885f7a75fe"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.637600 4899 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.638496 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "405fb02a-9786-4dc3-a4a6-bf885f7a75fe" (UID: "405fb02a-9786-4dc3-a4a6-bf885f7a75fe"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.643040 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.643686 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"405fb02a-9786-4dc3-a4a6-bf885f7a75fe","Type":"ContainerDied","Data":"1baf933e6a1d312c404f2c0b7cdc150fe7becdbdcd9aa0e9e0b8c0cdae9da205"} Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.643748 4899 scope.go:117] "RemoveContainer" containerID="acbded3a0caa4a0ee411e002a31288813cc64cc755b63cb8ad83dfd1ea64b4fc" Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.645610 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-config-data" (OuterVolumeSpecName: "config-data") pod "405fb02a-9786-4dc3-a4a6-bf885f7a75fe" (UID: "405fb02a-9786-4dc3-a4a6-bf885f7a75fe"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.698268 4899 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.698303 4899 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.698317 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.698328 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/405fb02a-9786-4dc3-a4a6-bf885f7a75fe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:49 crc kubenswrapper[4899]: I1003 08:57:49.773261 4899 scope.go:117] "RemoveContainer" containerID="b3bcfd680d21dffd9a651646bb698ef6ce38b1311bd4b55221c2a36cfb915611" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:49.997936 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.013847 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.049369 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-cxdjt" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.053224 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 08:57:50 crc kubenswrapper[4899]: E1003 08:57:50.053677 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="405fb02a-9786-4dc3-a4a6-bf885f7a75fe" containerName="glance-log" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.053691 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="405fb02a-9786-4dc3-a4a6-bf885f7a75fe" containerName="glance-log" Oct 03 08:57:50 crc kubenswrapper[4899]: E1003 08:57:50.053705 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d14aaa2d-9ea7-4886-8464-effc840164f4" containerName="mariadb-database-create" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.053713 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="d14aaa2d-9ea7-4886-8464-effc840164f4" containerName="mariadb-database-create" Oct 03 08:57:50 crc kubenswrapper[4899]: E1003 08:57:50.053734 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="405fb02a-9786-4dc3-a4a6-bf885f7a75fe" containerName="glance-httpd" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.053743 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="405fb02a-9786-4dc3-a4a6-bf885f7a75fe" containerName="glance-httpd" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.053998 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="405fb02a-9786-4dc3-a4a6-bf885f7a75fe" containerName="glance-log" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.054022 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="d14aaa2d-9ea7-4886-8464-effc840164f4" 
containerName="mariadb-database-create" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.054034 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="405fb02a-9786-4dc3-a4a6-bf885f7a75fe" containerName="glance-httpd" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.055925 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.059321 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.059500 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.068887 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.114584 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l82h4\" (UniqueName: \"kubernetes.io/projected/d14aaa2d-9ea7-4886-8464-effc840164f4-kube-api-access-l82h4\") pod \"d14aaa2d-9ea7-4886-8464-effc840164f4\" (UID: \"d14aaa2d-9ea7-4886-8464-effc840164f4\") " Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.114845 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee\") " pod="openstack/glance-default-external-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.114912 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee\") " pod="openstack/glance-default-external-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.114947 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee-logs\") pod \"glance-default-external-api-0\" (UID: \"aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee\") " pod="openstack/glance-default-external-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.114982 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee-scripts\") pod \"glance-default-external-api-0\" (UID: \"aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee\") " pod="openstack/glance-default-external-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.115084 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee-config-data\") pod \"glance-default-external-api-0\" (UID: \"aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee\") " pod="openstack/glance-default-external-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.115128 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22w7q\" (UniqueName: 
\"kubernetes.io/projected/aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee-kube-api-access-22w7q\") pod \"glance-default-external-api-0\" (UID: \"aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee\") " pod="openstack/glance-default-external-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.115220 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee\") " pod="openstack/glance-default-external-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.115270 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee\") " pod="openstack/glance-default-external-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.123493 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d14aaa2d-9ea7-4886-8464-effc840164f4-kube-api-access-l82h4" (OuterVolumeSpecName: "kube-api-access-l82h4") pod "d14aaa2d-9ea7-4886-8464-effc840164f4" (UID: "d14aaa2d-9ea7-4886-8464-effc840164f4"). InnerVolumeSpecName "kube-api-access-l82h4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.188842 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-9d7r2" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.192522 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-6mwps" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.216883 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xzh44\" (UniqueName: \"kubernetes.io/projected/1e579b0b-7373-4ab5-b543-3b58fa367f1a-kube-api-access-xzh44\") pod \"1e579b0b-7373-4ab5-b543-3b58fa367f1a\" (UID: \"1e579b0b-7373-4ab5-b543-3b58fa367f1a\") " Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.217016 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2n5q4\" (UniqueName: \"kubernetes.io/projected/babb4e0d-c482-4cb4-8140-4bfc34ab2afc-kube-api-access-2n5q4\") pod \"babb4e0d-c482-4cb4-8140-4bfc34ab2afc\" (UID: \"babb4e0d-c482-4cb4-8140-4bfc34ab2afc\") " Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.217452 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee-config-data\") pod \"glance-default-external-api-0\" (UID: \"aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee\") " pod="openstack/glance-default-external-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.217508 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22w7q\" (UniqueName: \"kubernetes.io/projected/aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee-kube-api-access-22w7q\") pod \"glance-default-external-api-0\" (UID: \"aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee\") " pod="openstack/glance-default-external-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.217547 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee\") " pod="openstack/glance-default-external-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.217585 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee\") " pod="openstack/glance-default-external-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.217642 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee\") " pod="openstack/glance-default-external-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.217678 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee\") " pod="openstack/glance-default-external-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.217712 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee-logs\") pod \"glance-default-external-api-0\" (UID: \"aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee\") " pod="openstack/glance-default-external-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 
08:57:50.217744 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee-scripts\") pod \"glance-default-external-api-0\" (UID: \"aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee\") " pod="openstack/glance-default-external-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.217808 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l82h4\" (UniqueName: \"kubernetes.io/projected/d14aaa2d-9ea7-4886-8464-effc840164f4-kube-api-access-l82h4\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.220846 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e579b0b-7373-4ab5-b543-3b58fa367f1a-kube-api-access-xzh44" (OuterVolumeSpecName: "kube-api-access-xzh44") pod "1e579b0b-7373-4ab5-b543-3b58fa367f1a" (UID: "1e579b0b-7373-4ab5-b543-3b58fa367f1a"). InnerVolumeSpecName "kube-api-access-xzh44". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.221949 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee\") " pod="openstack/glance-default-external-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.223383 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee\") " pod="openstack/glance-default-external-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.224043 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee-logs\") pod \"glance-default-external-api-0\" (UID: \"aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee\") " pod="openstack/glance-default-external-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.224420 4899 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-external-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.225620 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee-scripts\") pod \"glance-default-external-api-0\" (UID: \"aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee\") " pod="openstack/glance-default-external-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.228319 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/babb4e0d-c482-4cb4-8140-4bfc34ab2afc-kube-api-access-2n5q4" (OuterVolumeSpecName: "kube-api-access-2n5q4") pod "babb4e0d-c482-4cb4-8140-4bfc34ab2afc" (UID: "babb4e0d-c482-4cb4-8140-4bfc34ab2afc"). InnerVolumeSpecName "kube-api-access-2n5q4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.228903 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee-config-data\") pod \"glance-default-external-api-0\" (UID: \"aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee\") " pod="openstack/glance-default-external-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.229681 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee\") " pod="openstack/glance-default-external-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.244851 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22w7q\" (UniqueName: \"kubernetes.io/projected/aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee-kube-api-access-22w7q\") pod \"glance-default-external-api-0\" (UID: \"aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee\") " pod="openstack/glance-default-external-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.290983 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee\") " pod="openstack/glance-default-external-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.325470 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2n5q4\" (UniqueName: \"kubernetes.io/projected/babb4e0d-c482-4cb4-8140-4bfc34ab2afc-kube-api-access-2n5q4\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.325511 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzh44\" (UniqueName: \"kubernetes.io/projected/1e579b0b-7373-4ab5-b543-3b58fa367f1a-kube-api-access-xzh44\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.376790 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.377952 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.426232 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/894a907e-c584-4671-9ba8-e4b1df804b5b-config-data\") pod \"894a907e-c584-4671-9ba8-e4b1df804b5b\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.426318 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kf9kd\" (UniqueName: \"kubernetes.io/projected/894a907e-c584-4671-9ba8-e4b1df804b5b-kube-api-access-kf9kd\") pod \"894a907e-c584-4671-9ba8-e4b1df804b5b\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.426389 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/894a907e-c584-4671-9ba8-e4b1df804b5b-logs\") pod \"894a907e-c584-4671-9ba8-e4b1df804b5b\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.426421 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/894a907e-c584-4671-9ba8-e4b1df804b5b-internal-tls-certs\") pod \"894a907e-c584-4671-9ba8-e4b1df804b5b\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.426449 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/894a907e-c584-4671-9ba8-e4b1df804b5b-combined-ca-bundle\") pod \"894a907e-c584-4671-9ba8-e4b1df804b5b\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.426581 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"894a907e-c584-4671-9ba8-e4b1df804b5b\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.426600 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/894a907e-c584-4671-9ba8-e4b1df804b5b-httpd-run\") pod \"894a907e-c584-4671-9ba8-e4b1df804b5b\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.426644 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/894a907e-c584-4671-9ba8-e4b1df804b5b-scripts\") pod \"894a907e-c584-4671-9ba8-e4b1df804b5b\" (UID: \"894a907e-c584-4671-9ba8-e4b1df804b5b\") " Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.427576 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/894a907e-c584-4671-9ba8-e4b1df804b5b-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "894a907e-c584-4671-9ba8-e4b1df804b5b" (UID: "894a907e-c584-4671-9ba8-e4b1df804b5b"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.427862 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/894a907e-c584-4671-9ba8-e4b1df804b5b-logs" (OuterVolumeSpecName: "logs") pod "894a907e-c584-4671-9ba8-e4b1df804b5b" (UID: "894a907e-c584-4671-9ba8-e4b1df804b5b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.436553 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "894a907e-c584-4671-9ba8-e4b1df804b5b" (UID: "894a907e-c584-4671-9ba8-e4b1df804b5b"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.436679 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/894a907e-c584-4671-9ba8-e4b1df804b5b-kube-api-access-kf9kd" (OuterVolumeSpecName: "kube-api-access-kf9kd") pod "894a907e-c584-4671-9ba8-e4b1df804b5b" (UID: "894a907e-c584-4671-9ba8-e4b1df804b5b"). InnerVolumeSpecName "kube-api-access-kf9kd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.453057 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/894a907e-c584-4671-9ba8-e4b1df804b5b-scripts" (OuterVolumeSpecName: "scripts") pod "894a907e-c584-4671-9ba8-e4b1df804b5b" (UID: "894a907e-c584-4671-9ba8-e4b1df804b5b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.491198 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/894a907e-c584-4671-9ba8-e4b1df804b5b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "894a907e-c584-4671-9ba8-e4b1df804b5b" (UID: "894a907e-c584-4671-9ba8-e4b1df804b5b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.517700 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/894a907e-c584-4671-9ba8-e4b1df804b5b-config-data" (OuterVolumeSpecName: "config-data") pod "894a907e-c584-4671-9ba8-e4b1df804b5b" (UID: "894a907e-c584-4671-9ba8-e4b1df804b5b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.530543 4899 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/894a907e-c584-4671-9ba8-e4b1df804b5b-logs\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.530584 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/894a907e-c584-4671-9ba8-e4b1df804b5b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.530610 4899 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.530623 4899 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/894a907e-c584-4671-9ba8-e4b1df804b5b-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.530635 4899 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/894a907e-c584-4671-9ba8-e4b1df804b5b-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.530645 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/894a907e-c584-4671-9ba8-e4b1df804b5b-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.530656 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kf9kd\" (UniqueName: \"kubernetes.io/projected/894a907e-c584-4671-9ba8-e4b1df804b5b-kube-api-access-kf9kd\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.569429 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="405fb02a-9786-4dc3-a4a6-bf885f7a75fe" path="/var/lib/kubelet/pods/405fb02a-9786-4dc3-a4a6-bf885f7a75fe/volumes" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.576086 4899 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.616062 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/894a907e-c584-4671-9ba8-e4b1df804b5b-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "894a907e-c584-4671-9ba8-e4b1df804b5b" (UID: "894a907e-c584-4671-9ba8-e4b1df804b5b"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.634218 4899 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/894a907e-c584-4671-9ba8-e4b1df804b5b-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.634245 4899 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.701523 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-9d7r2" event={"ID":"babb4e0d-c482-4cb4-8140-4bfc34ab2afc","Type":"ContainerDied","Data":"cab1b22429c04bda3e38c3e084e4fb200769552bc93ccefc9b52da9f23462c0a"} Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.701573 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cab1b22429c04bda3e38c3e084e4fb200769552bc93ccefc9b52da9f23462c0a" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.701648 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-9d7r2" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.710739 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7b505b6-c3ce-4052-b99a-1c548bc69df6","Type":"ContainerStarted","Data":"a6c9c35522a80c1f51546816b69e8f085bf9a3bc1c92213164d1bd627d7df006"} Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.710779 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7b505b6-c3ce-4052-b99a-1c548bc69df6","Type":"ContainerStarted","Data":"53a4f93688d21f5c6046b271974cb5d25a846b7568aa88289f7c2058cef3569b"} Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.720098 4899 generic.go:334] "Generic (PLEG): container finished" podID="894a907e-c584-4671-9ba8-e4b1df804b5b" containerID="d8e9a0d728d0a78bc45d8f5dc222e918da9039d54b35fd14862f4518b06a1ebc" exitCode=0 Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.720191 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"894a907e-c584-4671-9ba8-e4b1df804b5b","Type":"ContainerDied","Data":"d8e9a0d728d0a78bc45d8f5dc222e918da9039d54b35fd14862f4518b06a1ebc"} Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.720240 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"894a907e-c584-4671-9ba8-e4b1df804b5b","Type":"ContainerDied","Data":"df155f8e43821a3d0b1747326566060cda5fd8fe0638167c40ac98ad9358bfdd"} Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.720262 4899 scope.go:117] "RemoveContainer" containerID="d8e9a0d728d0a78bc45d8f5dc222e918da9039d54b35fd14862f4518b06a1ebc" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.720419 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.729640 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-cxdjt" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.729636 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-cxdjt" event={"ID":"d14aaa2d-9ea7-4886-8464-effc840164f4","Type":"ContainerDied","Data":"5025e15b8bd8f20e317e5ef6a5d0aa26cac5bed33551c34811f96ab67227fb71"} Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.730255 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5025e15b8bd8f20e317e5ef6a5d0aa26cac5bed33551c34811f96ab67227fb71" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.735863 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-6mwps" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.741377 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-6mwps" event={"ID":"1e579b0b-7373-4ab5-b543-3b58fa367f1a","Type":"ContainerDied","Data":"f5c34f2e3432efb89fc689801af8274d1067f3dc37cbc413fb7ab816c23f7296"} Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.741449 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f5c34f2e3432efb89fc689801af8274d1067f3dc37cbc413fb7ab816c23f7296" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.788513 4899 scope.go:117] "RemoveContainer" containerID="32a3e5375a1223b70531d108e09150a9f5890189799bfa1dc1526e56b37f19b1" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.815731 4899 scope.go:117] "RemoveContainer" containerID="d8e9a0d728d0a78bc45d8f5dc222e918da9039d54b35fd14862f4518b06a1ebc" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.817190 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 08:57:50 crc kubenswrapper[4899]: E1003 08:57:50.819122 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8e9a0d728d0a78bc45d8f5dc222e918da9039d54b35fd14862f4518b06a1ebc\": container with ID starting with d8e9a0d728d0a78bc45d8f5dc222e918da9039d54b35fd14862f4518b06a1ebc not found: ID does not exist" containerID="d8e9a0d728d0a78bc45d8f5dc222e918da9039d54b35fd14862f4518b06a1ebc" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.819185 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8e9a0d728d0a78bc45d8f5dc222e918da9039d54b35fd14862f4518b06a1ebc"} err="failed to get container status \"d8e9a0d728d0a78bc45d8f5dc222e918da9039d54b35fd14862f4518b06a1ebc\": rpc error: code = NotFound desc = could not find container \"d8e9a0d728d0a78bc45d8f5dc222e918da9039d54b35fd14862f4518b06a1ebc\": container with ID starting with d8e9a0d728d0a78bc45d8f5dc222e918da9039d54b35fd14862f4518b06a1ebc not found: ID does not exist" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.819207 4899 scope.go:117] "RemoveContainer" containerID="32a3e5375a1223b70531d108e09150a9f5890189799bfa1dc1526e56b37f19b1" Oct 03 08:57:50 crc kubenswrapper[4899]: E1003 08:57:50.819623 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32a3e5375a1223b70531d108e09150a9f5890189799bfa1dc1526e56b37f19b1\": container with ID starting with 32a3e5375a1223b70531d108e09150a9f5890189799bfa1dc1526e56b37f19b1 not found: ID does not exist" containerID="32a3e5375a1223b70531d108e09150a9f5890189799bfa1dc1526e56b37f19b1" Oct 03 08:57:50 crc 
kubenswrapper[4899]: I1003 08:57:50.819663 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32a3e5375a1223b70531d108e09150a9f5890189799bfa1dc1526e56b37f19b1"} err="failed to get container status \"32a3e5375a1223b70531d108e09150a9f5890189799bfa1dc1526e56b37f19b1\": rpc error: code = NotFound desc = could not find container \"32a3e5375a1223b70531d108e09150a9f5890189799bfa1dc1526e56b37f19b1\": container with ID starting with 32a3e5375a1223b70531d108e09150a9f5890189799bfa1dc1526e56b37f19b1 not found: ID does not exist" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.837963 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.846280 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 08:57:50 crc kubenswrapper[4899]: E1003 08:57:50.846688 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="babb4e0d-c482-4cb4-8140-4bfc34ab2afc" containerName="mariadb-database-create" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.846708 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="babb4e0d-c482-4cb4-8140-4bfc34ab2afc" containerName="mariadb-database-create" Oct 03 08:57:50 crc kubenswrapper[4899]: E1003 08:57:50.846725 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e579b0b-7373-4ab5-b543-3b58fa367f1a" containerName="mariadb-database-create" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.846732 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e579b0b-7373-4ab5-b543-3b58fa367f1a" containerName="mariadb-database-create" Oct 03 08:57:50 crc kubenswrapper[4899]: E1003 08:57:50.846756 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="894a907e-c584-4671-9ba8-e4b1df804b5b" containerName="glance-httpd" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.846762 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="894a907e-c584-4671-9ba8-e4b1df804b5b" containerName="glance-httpd" Oct 03 08:57:50 crc kubenswrapper[4899]: E1003 08:57:50.846773 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="894a907e-c584-4671-9ba8-e4b1df804b5b" containerName="glance-log" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.846782 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="894a907e-c584-4671-9ba8-e4b1df804b5b" containerName="glance-log" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.846972 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e579b0b-7373-4ab5-b543-3b58fa367f1a" containerName="mariadb-database-create" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.846991 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="894a907e-c584-4671-9ba8-e4b1df804b5b" containerName="glance-httpd" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.847006 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="894a907e-c584-4671-9ba8-e4b1df804b5b" containerName="glance-log" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.847021 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="babb4e0d-c482-4cb4-8140-4bfc34ab2afc" containerName="mariadb-database-create" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.847951 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.852437 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.852606 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.871549 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.883175 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5bcb4b4796-x4jmr" podUID="2eba2a41-6a16-4f77-a699-157fb6fa7b3f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.147:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.147:8443: connect: connection refused" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.883284 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.940796 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64f5461f-c255-4b93-9d86-65321f2dc74b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"64f5461f-c255-4b93-9d86-65321f2dc74b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.940986 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64f5461f-c255-4b93-9d86-65321f2dc74b-logs\") pod \"glance-default-internal-api-0\" (UID: \"64f5461f-c255-4b93-9d86-65321f2dc74b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.941018 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64f5461f-c255-4b93-9d86-65321f2dc74b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"64f5461f-c255-4b93-9d86-65321f2dc74b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.941035 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/64f5461f-c255-4b93-9d86-65321f2dc74b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"64f5461f-c255-4b93-9d86-65321f2dc74b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.941052 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"64f5461f-c255-4b93-9d86-65321f2dc74b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.941119 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64f5461f-c255-4b93-9d86-65321f2dc74b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"64f5461f-c255-4b93-9d86-65321f2dc74b\") " pod="openstack/glance-default-internal-api-0" Oct 03 
08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.941137 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwhlm\" (UniqueName: \"kubernetes.io/projected/64f5461f-c255-4b93-9d86-65321f2dc74b-kube-api-access-fwhlm\") pod \"glance-default-internal-api-0\" (UID: \"64f5461f-c255-4b93-9d86-65321f2dc74b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:57:50 crc kubenswrapper[4899]: I1003 08:57:50.941158 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/64f5461f-c255-4b93-9d86-65321f2dc74b-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"64f5461f-c255-4b93-9d86-65321f2dc74b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:57:51 crc kubenswrapper[4899]: I1003 08:57:51.043708 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64f5461f-c255-4b93-9d86-65321f2dc74b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"64f5461f-c255-4b93-9d86-65321f2dc74b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:57:51 crc kubenswrapper[4899]: I1003 08:57:51.044013 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/64f5461f-c255-4b93-9d86-65321f2dc74b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"64f5461f-c255-4b93-9d86-65321f2dc74b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:57:51 crc kubenswrapper[4899]: I1003 08:57:51.044036 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"64f5461f-c255-4b93-9d86-65321f2dc74b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:57:51 crc kubenswrapper[4899]: I1003 08:57:51.044337 4899 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"64f5461f-c255-4b93-9d86-65321f2dc74b\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0" Oct 03 08:57:51 crc kubenswrapper[4899]: I1003 08:57:51.044593 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/64f5461f-c255-4b93-9d86-65321f2dc74b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"64f5461f-c255-4b93-9d86-65321f2dc74b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:57:51 crc kubenswrapper[4899]: I1003 08:57:51.044590 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64f5461f-c255-4b93-9d86-65321f2dc74b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"64f5461f-c255-4b93-9d86-65321f2dc74b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:57:51 crc kubenswrapper[4899]: I1003 08:57:51.044663 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwhlm\" (UniqueName: \"kubernetes.io/projected/64f5461f-c255-4b93-9d86-65321f2dc74b-kube-api-access-fwhlm\") pod \"glance-default-internal-api-0\" (UID: \"64f5461f-c255-4b93-9d86-65321f2dc74b\") " pod="openstack/glance-default-internal-api-0" Oct 03 
08:57:51 crc kubenswrapper[4899]: I1003 08:57:51.044706 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/64f5461f-c255-4b93-9d86-65321f2dc74b-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"64f5461f-c255-4b93-9d86-65321f2dc74b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:57:51 crc kubenswrapper[4899]: I1003 08:57:51.044972 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64f5461f-c255-4b93-9d86-65321f2dc74b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"64f5461f-c255-4b93-9d86-65321f2dc74b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:57:51 crc kubenswrapper[4899]: I1003 08:57:51.045084 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64f5461f-c255-4b93-9d86-65321f2dc74b-logs\") pod \"glance-default-internal-api-0\" (UID: \"64f5461f-c255-4b93-9d86-65321f2dc74b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:57:51 crc kubenswrapper[4899]: I1003 08:57:51.045380 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64f5461f-c255-4b93-9d86-65321f2dc74b-logs\") pod \"glance-default-internal-api-0\" (UID: \"64f5461f-c255-4b93-9d86-65321f2dc74b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:57:51 crc kubenswrapper[4899]: I1003 08:57:51.047832 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64f5461f-c255-4b93-9d86-65321f2dc74b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"64f5461f-c255-4b93-9d86-65321f2dc74b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:57:51 crc kubenswrapper[4899]: I1003 08:57:51.060191 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/64f5461f-c255-4b93-9d86-65321f2dc74b-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"64f5461f-c255-4b93-9d86-65321f2dc74b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:57:51 crc kubenswrapper[4899]: I1003 08:57:51.060245 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64f5461f-c255-4b93-9d86-65321f2dc74b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"64f5461f-c255-4b93-9d86-65321f2dc74b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:57:51 crc kubenswrapper[4899]: I1003 08:57:51.062279 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64f5461f-c255-4b93-9d86-65321f2dc74b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"64f5461f-c255-4b93-9d86-65321f2dc74b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:57:51 crc kubenswrapper[4899]: I1003 08:57:51.062986 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwhlm\" (UniqueName: \"kubernetes.io/projected/64f5461f-c255-4b93-9d86-65321f2dc74b-kube-api-access-fwhlm\") pod \"glance-default-internal-api-0\" (UID: \"64f5461f-c255-4b93-9d86-65321f2dc74b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:57:51 crc kubenswrapper[4899]: I1003 08:57:51.084397 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"64f5461f-c255-4b93-9d86-65321f2dc74b\") " pod="openstack/glance-default-internal-api-0" Oct 03 08:57:51 crc kubenswrapper[4899]: I1003 08:57:51.170264 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 08:57:51 crc kubenswrapper[4899]: I1003 08:57:51.239828 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 08:57:51 crc kubenswrapper[4899]: I1003 08:57:51.744111 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee","Type":"ContainerStarted","Data":"8753b85ec66bbe77bacd623aa8be690974a24bb8c68a3a0127458c0c049868e8"} Oct 03 08:57:51 crc kubenswrapper[4899]: I1003 08:57:51.748471 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7b505b6-c3ce-4052-b99a-1c548bc69df6","Type":"ContainerStarted","Data":"46e95043952bde80c56b270ad44ea769d364eb8008e5d9e5a9bfce089b93da98"} Oct 03 08:57:51 crc kubenswrapper[4899]: I1003 08:57:51.792678 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 08:57:52 crc kubenswrapper[4899]: I1003 08:57:52.538259 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="894a907e-c584-4671-9ba8-e4b1df804b5b" path="/var/lib/kubelet/pods/894a907e-c584-4671-9ba8-e4b1df804b5b/volumes" Oct 03 08:57:52 crc kubenswrapper[4899]: I1003 08:57:52.768980 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"64f5461f-c255-4b93-9d86-65321f2dc74b","Type":"ContainerStarted","Data":"13781675b04baa0b562a3b10b3e054bf6d73b181f2b26f93694b22cd66a7bc49"} Oct 03 08:57:52 crc kubenswrapper[4899]: I1003 08:57:52.769283 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"64f5461f-c255-4b93-9d86-65321f2dc74b","Type":"ContainerStarted","Data":"78adabfec4819547d904acf1917b6f1e7f21907a2389e7b71252dd45a5b682ed"} Oct 03 08:57:52 crc kubenswrapper[4899]: I1003 08:57:52.776907 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7b505b6-c3ce-4052-b99a-1c548bc69df6","Type":"ContainerStarted","Data":"9c81baf9eded7c10c77302fc23702a09e72b2ad71b1be00d942c1962dfc41172"} Oct 03 08:57:52 crc kubenswrapper[4899]: I1003 08:57:52.778482 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 03 08:57:52 crc kubenswrapper[4899]: I1003 08:57:52.783218 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee","Type":"ContainerStarted","Data":"39e1842fe6db01b5338ea6a7b4c30b4d10a73e1eeaef517a1127db93ef865975"} Oct 03 08:57:52 crc kubenswrapper[4899]: I1003 08:57:52.783269 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee","Type":"ContainerStarted","Data":"edf2e605d64aec67ddee75a37190435a8ac96abd6924571664c73b6af002036e"} Oct 03 08:57:52 crc kubenswrapper[4899]: I1003 08:57:52.812216 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.919694807 
podStartE2EDuration="5.812198145s" podCreationTimestamp="2025-10-03 08:57:47 +0000 UTC" firstStartedPulling="2025-10-03 08:57:48.525346492 +0000 UTC m=+1042.632831445" lastFinishedPulling="2025-10-03 08:57:52.41784983 +0000 UTC m=+1046.525334783" observedRunningTime="2025-10-03 08:57:52.805055639 +0000 UTC m=+1046.912540632" watchObservedRunningTime="2025-10-03 08:57:52.812198145 +0000 UTC m=+1046.919683098" Oct 03 08:57:52 crc kubenswrapper[4899]: I1003 08:57:52.830780 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.830760851 podStartE2EDuration="3.830760851s" podCreationTimestamp="2025-10-03 08:57:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:57:52.828983104 +0000 UTC m=+1046.936468057" watchObservedRunningTime="2025-10-03 08:57:52.830760851 +0000 UTC m=+1046.938245804" Oct 03 08:57:53 crc kubenswrapper[4899]: I1003 08:57:53.286795 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:57:53 crc kubenswrapper[4899]: I1003 08:57:53.793708 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"64f5461f-c255-4b93-9d86-65321f2dc74b","Type":"ContainerStarted","Data":"b601dfbc807f21429d60a12a9aa9394df609ee6eba6c6521a8cd119b63f77b25"} Oct 03 08:57:53 crc kubenswrapper[4899]: I1003 08:57:53.823485 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.823465885 podStartE2EDuration="3.823465885s" podCreationTimestamp="2025-10-03 08:57:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:57:53.820366546 +0000 UTC m=+1047.927851499" watchObservedRunningTime="2025-10-03 08:57:53.823465885 +0000 UTC m=+1047.930950838" Oct 03 08:57:54 crc kubenswrapper[4899]: I1003 08:57:54.802883 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a7b505b6-c3ce-4052-b99a-1c548bc69df6" containerName="ceilometer-central-agent" containerID="cri-o://53a4f93688d21f5c6046b271974cb5d25a846b7568aa88289f7c2058cef3569b" gracePeriod=30 Oct 03 08:57:54 crc kubenswrapper[4899]: I1003 08:57:54.804539 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a7b505b6-c3ce-4052-b99a-1c548bc69df6" containerName="proxy-httpd" containerID="cri-o://9c81baf9eded7c10c77302fc23702a09e72b2ad71b1be00d942c1962dfc41172" gracePeriod=30 Oct 03 08:57:54 crc kubenswrapper[4899]: I1003 08:57:54.804820 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a7b505b6-c3ce-4052-b99a-1c548bc69df6" containerName="sg-core" containerID="cri-o://46e95043952bde80c56b270ad44ea769d364eb8008e5d9e5a9bfce089b93da98" gracePeriod=30 Oct 03 08:57:54 crc kubenswrapper[4899]: I1003 08:57:54.804873 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a7b505b6-c3ce-4052-b99a-1c548bc69df6" containerName="ceilometer-notification-agent" containerID="cri-o://a6c9c35522a80c1f51546816b69e8f085bf9a3bc1c92213164d1bd627d7df006" gracePeriod=30 Oct 03 08:57:55 crc kubenswrapper[4899]: W1003 08:57:55.320929 4899 watcher.go:93] Error while processing event 
("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1e579b0b_7373_4ab5_b543_3b58fa367f1a.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1e579b0b_7373_4ab5_b543_3b58fa367f1a.slice: no such file or directory Oct 03 08:57:55 crc kubenswrapper[4899]: W1003 08:57:55.321663 4899 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbabb4e0d_c482_4cb4_8140_4bfc34ab2afc.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbabb4e0d_c482_4cb4_8140_4bfc34ab2afc.slice: no such file or directory Oct 03 08:57:55 crc kubenswrapper[4899]: W1003 08:57:55.321714 4899 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd14aaa2d_9ea7_4886_8464_effc840164f4.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd14aaa2d_9ea7_4886_8464_effc840164f4.slice: no such file or directory Oct 03 08:57:55 crc kubenswrapper[4899]: E1003 08:57:55.341411 4899 manager.go:1116] Failed to create existing container: /kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf9015e0_4ee8_45f0_9788_bc7060d4c8e5.slice/crio-b20bb2156e6df48d5e85d42d4b3280db47d43f063c1cac2b68cb959d4789dfff: Error finding container b20bb2156e6df48d5e85d42d4b3280db47d43f063c1cac2b68cb959d4789dfff: Status 404 returned error can't find the container with id b20bb2156e6df48d5e85d42d4b3280db47d43f063c1cac2b68cb959d4789dfff Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.366052 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.373275 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-54794d7d5c-64vlg" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.601797 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:57:55 crc kubenswrapper[4899]: E1003 08:57:55.674957 4899 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod405fb02a_9786_4dc3_a4a6_bf885f7a75fe.slice/crio-1baf933e6a1d312c404f2c0b7cdc150fe7becdbdcd9aa0e9e0b8c0cdae9da205\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod405fb02a_9786_4dc3_a4a6_bf885f7a75fe.slice/crio-conmon-b3bcfd680d21dffd9a651646bb698ef6ce38b1311bd4b55221c2a36cfb915611.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod887fe423_dd24_43d2_b8df_dcdb615a9fda.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod405fb02a_9786_4dc3_a4a6_bf885f7a75fe.slice/crio-acbded3a0caa4a0ee411e002a31288813cc64cc755b63cb8ad83dfd1ea64b4fc.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2eba2a41_6a16_4f77_a699_157fb6fa7b3f.slice/crio-8c3b5ed2a58fe798c1bcc02dbfbe942f01e86c53e3fd06877aa330e11f9e69c3.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod894a907e_c584_4671_9ba8_e4b1df804b5b.slice/crio-32a3e5375a1223b70531d108e09150a9f5890189799bfa1dc1526e56b37f19b1.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod894a907e_c584_4671_9ba8_e4b1df804b5b.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod894a907e_c584_4671_9ba8_e4b1df804b5b.slice/crio-conmon-32a3e5375a1223b70531d108e09150a9f5890189799bfa1dc1526e56b37f19b1.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod887fe423_dd24_43d2_b8df_dcdb615a9fda.slice/crio-conmon-c99c3d96feae301f4057e1f85506e6467cf8624d73bc05e0b90994ad7ee33ab1.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod887fe423_dd24_43d2_b8df_dcdb615a9fda.slice/crio-1c884c460a557ddbe7a2d80e877f9b451067f43ea5100282946aeeaa34749a7c.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod887fe423_dd24_43d2_b8df_dcdb615a9fda.slice/crio-conmon-2040c6b23ed3c5c1bd63344be807abc82e0ff9c097c1f3b785e7dd768f695421.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod887fe423_dd24_43d2_b8df_dcdb615a9fda.slice/crio-2040c6b23ed3c5c1bd63344be807abc82e0ff9c097c1f3b785e7dd768f695421.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod894a907e_c584_4671_9ba8_e4b1df804b5b.slice/crio-d8e9a0d728d0a78bc45d8f5dc222e918da9039d54b35fd14862f4518b06a1ebc.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod894a907e_c584_4671_9ba8_e4b1df804b5b.slice/crio-conmon-d8e9a0d728d0a78bc45d8f5dc222e918da9039d54b35fd14862f4518b06a1ebc.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod887fe423_dd24_43d2_b8df_dcdb615a9fda.slice/crio-d7eaaf291f50e8c8248d5764ccf06be5add2e8200c103d5497c6dde8edf8acfb\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod887fe423_dd24_43d2_b8df_dcdb615a9fda.slice/crio-conmon-851de5c66a36f80c52feca8dd4990d1e41af9a152781dcc082795d2e88df93e9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod405fb02a_9786_4dc3_a4a6_bf885f7a75fe.slice/crio-b3bcfd680d21dffd9a651646bb698ef6ce38b1311bd4b55221c2a36cfb915611.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod887fe423_dd24_43d2_b8df_dcdb615a9fda.slice/crio-conmon-1c884c460a557ddbe7a2d80e877f9b451067f43ea5100282946aeeaa34749a7c.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod894a907e_c584_4671_9ba8_e4b1df804b5b.slice/crio-df155f8e43821a3d0b1747326566060cda5fd8fe0638167c40ac98ad9358bfdd\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod887fe423_dd24_43d2_b8df_dcdb615a9fda.slice/crio-851de5c66a36f80c52feca8dd4990d1e41af9a152781dcc082795d2e88df93e9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod405fb02a_9786_4dc3_a4a6_bf885f7a75fe.slice/crio-conmon-acbded3a0caa4a0ee411e002a31288813cc64cc755b63cb8ad83dfd1ea64b4fc.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod405fb02a_9786_4dc3_a4a6_bf885f7a75fe.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod887fe423_dd24_43d2_b8df_dcdb615a9fda.slice/crio-c99c3d96feae301f4057e1f85506e6467cf8624d73bc05e0b90994ad7ee33ab1.scope\": RecentStats: unable to find data in memory cache]" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.717298 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.780628 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7b505b6-c3ce-4052-b99a-1c548bc69df6-scripts\") pod \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\" (UID: \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\") " Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.780716 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vtdwc\" (UniqueName: \"kubernetes.io/projected/a7b505b6-c3ce-4052-b99a-1c548bc69df6-kube-api-access-vtdwc\") pod \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\" (UID: \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\") " Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.780750 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7b505b6-c3ce-4052-b99a-1c548bc69df6-run-httpd\") pod \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\" (UID: \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\") " Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.780803 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7b505b6-c3ce-4052-b99a-1c548bc69df6-sg-core-conf-yaml\") pod \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\" (UID: \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\") " Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.780922 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7b505b6-c3ce-4052-b99a-1c548bc69df6-config-data\") pod \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\" (UID: \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\") " Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.780980 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7b505b6-c3ce-4052-b99a-1c548bc69df6-log-httpd\") pod \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\" (UID: \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\") " Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.781074 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7b505b6-c3ce-4052-b99a-1c548bc69df6-combined-ca-bundle\") pod \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\" (UID: \"a7b505b6-c3ce-4052-b99a-1c548bc69df6\") " Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.782906 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7b505b6-c3ce-4052-b99a-1c548bc69df6-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a7b505b6-c3ce-4052-b99a-1c548bc69df6" (UID: "a7b505b6-c3ce-4052-b99a-1c548bc69df6"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.783361 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7b505b6-c3ce-4052-b99a-1c548bc69df6-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a7b505b6-c3ce-4052-b99a-1c548bc69df6" (UID: "a7b505b6-c3ce-4052-b99a-1c548bc69df6"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.788362 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7b505b6-c3ce-4052-b99a-1c548bc69df6-kube-api-access-vtdwc" (OuterVolumeSpecName: "kube-api-access-vtdwc") pod "a7b505b6-c3ce-4052-b99a-1c548bc69df6" (UID: "a7b505b6-c3ce-4052-b99a-1c548bc69df6"). InnerVolumeSpecName "kube-api-access-vtdwc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.788907 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7b505b6-c3ce-4052-b99a-1c548bc69df6-scripts" (OuterVolumeSpecName: "scripts") pod "a7b505b6-c3ce-4052-b99a-1c548bc69df6" (UID: "a7b505b6-c3ce-4052-b99a-1c548bc69df6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.813307 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7b505b6-c3ce-4052-b99a-1c548bc69df6-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a7b505b6-c3ce-4052-b99a-1c548bc69df6" (UID: "a7b505b6-c3ce-4052-b99a-1c548bc69df6"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.817258 4899 generic.go:334] "Generic (PLEG): container finished" podID="2eba2a41-6a16-4f77-a699-157fb6fa7b3f" containerID="8c3b5ed2a58fe798c1bcc02dbfbe942f01e86c53e3fd06877aa330e11f9e69c3" exitCode=137 Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.817379 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5bcb4b4796-x4jmr" event={"ID":"2eba2a41-6a16-4f77-a699-157fb6fa7b3f","Type":"ContainerDied","Data":"8c3b5ed2a58fe798c1bcc02dbfbe942f01e86c53e3fd06877aa330e11f9e69c3"} Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.817415 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5bcb4b4796-x4jmr" event={"ID":"2eba2a41-6a16-4f77-a699-157fb6fa7b3f","Type":"ContainerDied","Data":"b5d89b934bc4a692179d707b8a18e73008072ebcc53eaff4c9a58e3f77622507"} Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.817482 4899 scope.go:117] "RemoveContainer" containerID="2af927b92552cb7d46e1ad7165e03c49307e20b3511b616a846aab7a5ac52533" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.817692 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5bcb4b4796-x4jmr" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.825408 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.825490 4899 generic.go:334] "Generic (PLEG): container finished" podID="a7b505b6-c3ce-4052-b99a-1c548bc69df6" containerID="9c81baf9eded7c10c77302fc23702a09e72b2ad71b1be00d942c1962dfc41172" exitCode=0 Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.825512 4899 generic.go:334] "Generic (PLEG): container finished" podID="a7b505b6-c3ce-4052-b99a-1c548bc69df6" containerID="46e95043952bde80c56b270ad44ea769d364eb8008e5d9e5a9bfce089b93da98" exitCode=2 Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.825522 4899 generic.go:334] "Generic (PLEG): container finished" podID="a7b505b6-c3ce-4052-b99a-1c548bc69df6" containerID="a6c9c35522a80c1f51546816b69e8f085bf9a3bc1c92213164d1bd627d7df006" exitCode=0 Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.825531 4899 generic.go:334] "Generic (PLEG): container finished" podID="a7b505b6-c3ce-4052-b99a-1c548bc69df6" containerID="53a4f93688d21f5c6046b271974cb5d25a846b7568aa88289f7c2058cef3569b" exitCode=0 Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.825574 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7b505b6-c3ce-4052-b99a-1c548bc69df6","Type":"ContainerDied","Data":"9c81baf9eded7c10c77302fc23702a09e72b2ad71b1be00d942c1962dfc41172"} Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.825596 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7b505b6-c3ce-4052-b99a-1c548bc69df6","Type":"ContainerDied","Data":"46e95043952bde80c56b270ad44ea769d364eb8008e5d9e5a9bfce089b93da98"} Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.825607 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7b505b6-c3ce-4052-b99a-1c548bc69df6","Type":"ContainerDied","Data":"a6c9c35522a80c1f51546816b69e8f085bf9a3bc1c92213164d1bd627d7df006"} Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.825616 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7b505b6-c3ce-4052-b99a-1c548bc69df6","Type":"ContainerDied","Data":"53a4f93688d21f5c6046b271974cb5d25a846b7568aa88289f7c2058cef3569b"} Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.825625 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7b505b6-c3ce-4052-b99a-1c548bc69df6","Type":"ContainerDied","Data":"a8363a4ae38310411e2ded9b90a71c6a66d26d835db378333b7b044f9e408682"} Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.870933 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7b505b6-c3ce-4052-b99a-1c548bc69df6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a7b505b6-c3ce-4052-b99a-1c548bc69df6" (UID: "a7b505b6-c3ce-4052-b99a-1c548bc69df6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.882623 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-combined-ca-bundle\") pod \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\" (UID: \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\") " Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.882699 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-config-data\") pod \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\" (UID: \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\") " Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.882773 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-logs\") pod \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\" (UID: \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\") " Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.882857 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-scripts\") pod \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\" (UID: \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\") " Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.883282 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-logs" (OuterVolumeSpecName: "logs") pod "2eba2a41-6a16-4f77-a699-157fb6fa7b3f" (UID: "2eba2a41-6a16-4f77-a699-157fb6fa7b3f"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.883421 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-horizon-secret-key\") pod \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\" (UID: \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\") " Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.883445 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfb27\" (UniqueName: \"kubernetes.io/projected/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-kube-api-access-kfb27\") pod \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\" (UID: \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\") " Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.883537 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-horizon-tls-certs\") pod \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\" (UID: \"2eba2a41-6a16-4f77-a699-157fb6fa7b3f\") " Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.884915 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7b505b6-c3ce-4052-b99a-1c548bc69df6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.884932 4899 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7b505b6-c3ce-4052-b99a-1c548bc69df6-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.884942 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vtdwc\" (UniqueName: \"kubernetes.io/projected/a7b505b6-c3ce-4052-b99a-1c548bc69df6-kube-api-access-vtdwc\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.884951 4899 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7b505b6-c3ce-4052-b99a-1c548bc69df6-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.884959 4899 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7b505b6-c3ce-4052-b99a-1c548bc69df6-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.884969 4899 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-logs\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.884976 4899 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7b505b6-c3ce-4052-b99a-1c548bc69df6-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.888485 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-kube-api-access-kfb27" (OuterVolumeSpecName: "kube-api-access-kfb27") pod "2eba2a41-6a16-4f77-a699-157fb6fa7b3f" (UID: "2eba2a41-6a16-4f77-a699-157fb6fa7b3f"). InnerVolumeSpecName "kube-api-access-kfb27". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.888612 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "2eba2a41-6a16-4f77-a699-157fb6fa7b3f" (UID: "2eba2a41-6a16-4f77-a699-157fb6fa7b3f"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.904550 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-scripts" (OuterVolumeSpecName: "scripts") pod "2eba2a41-6a16-4f77-a699-157fb6fa7b3f" (UID: "2eba2a41-6a16-4f77-a699-157fb6fa7b3f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.907536 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-config-data" (OuterVolumeSpecName: "config-data") pod "2eba2a41-6a16-4f77-a699-157fb6fa7b3f" (UID: "2eba2a41-6a16-4f77-a699-157fb6fa7b3f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.908103 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2eba2a41-6a16-4f77-a699-157fb6fa7b3f" (UID: "2eba2a41-6a16-4f77-a699-157fb6fa7b3f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.910978 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7b505b6-c3ce-4052-b99a-1c548bc69df6-config-data" (OuterVolumeSpecName: "config-data") pod "a7b505b6-c3ce-4052-b99a-1c548bc69df6" (UID: "a7b505b6-c3ce-4052-b99a-1c548bc69df6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.933155 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "2eba2a41-6a16-4f77-a699-157fb6fa7b3f" (UID: "2eba2a41-6a16-4f77-a699-157fb6fa7b3f"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.986874 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfb27\" (UniqueName: \"kubernetes.io/projected/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-kube-api-access-kfb27\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.986921 4899 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.986931 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.986939 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.986950 4899 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.986959 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7b505b6-c3ce-4052-b99a-1c548bc69df6-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:55 crc kubenswrapper[4899]: I1003 08:57:55.986969 4899 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2eba2a41-6a16-4f77-a699-157fb6fa7b3f-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.002359 4899 scope.go:117] "RemoveContainer" containerID="8c3b5ed2a58fe798c1bcc02dbfbe942f01e86c53e3fd06877aa330e11f9e69c3" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.041311 4899 scope.go:117] "RemoveContainer" containerID="2af927b92552cb7d46e1ad7165e03c49307e20b3511b616a846aab7a5ac52533" Oct 03 08:57:56 crc kubenswrapper[4899]: E1003 08:57:56.042369 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2af927b92552cb7d46e1ad7165e03c49307e20b3511b616a846aab7a5ac52533\": container with ID starting with 2af927b92552cb7d46e1ad7165e03c49307e20b3511b616a846aab7a5ac52533 not found: ID does not exist" containerID="2af927b92552cb7d46e1ad7165e03c49307e20b3511b616a846aab7a5ac52533" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.042432 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2af927b92552cb7d46e1ad7165e03c49307e20b3511b616a846aab7a5ac52533"} err="failed to get container status \"2af927b92552cb7d46e1ad7165e03c49307e20b3511b616a846aab7a5ac52533\": rpc error: code = NotFound desc = could not find container \"2af927b92552cb7d46e1ad7165e03c49307e20b3511b616a846aab7a5ac52533\": container with ID starting with 2af927b92552cb7d46e1ad7165e03c49307e20b3511b616a846aab7a5ac52533 not found: ID does not exist" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.042459 4899 scope.go:117] "RemoveContainer" containerID="8c3b5ed2a58fe798c1bcc02dbfbe942f01e86c53e3fd06877aa330e11f9e69c3" Oct 03 08:57:56 crc 
kubenswrapper[4899]: E1003 08:57:56.042850 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c3b5ed2a58fe798c1bcc02dbfbe942f01e86c53e3fd06877aa330e11f9e69c3\": container with ID starting with 8c3b5ed2a58fe798c1bcc02dbfbe942f01e86c53e3fd06877aa330e11f9e69c3 not found: ID does not exist" containerID="8c3b5ed2a58fe798c1bcc02dbfbe942f01e86c53e3fd06877aa330e11f9e69c3" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.042880 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c3b5ed2a58fe798c1bcc02dbfbe942f01e86c53e3fd06877aa330e11f9e69c3"} err="failed to get container status \"8c3b5ed2a58fe798c1bcc02dbfbe942f01e86c53e3fd06877aa330e11f9e69c3\": rpc error: code = NotFound desc = could not find container \"8c3b5ed2a58fe798c1bcc02dbfbe942f01e86c53e3fd06877aa330e11f9e69c3\": container with ID starting with 8c3b5ed2a58fe798c1bcc02dbfbe942f01e86c53e3fd06877aa330e11f9e69c3 not found: ID does not exist" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.042929 4899 scope.go:117] "RemoveContainer" containerID="9c81baf9eded7c10c77302fc23702a09e72b2ad71b1be00d942c1962dfc41172" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.058541 4899 scope.go:117] "RemoveContainer" containerID="46e95043952bde80c56b270ad44ea769d364eb8008e5d9e5a9bfce089b93da98" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.085827 4899 scope.go:117] "RemoveContainer" containerID="a6c9c35522a80c1f51546816b69e8f085bf9a3bc1c92213164d1bd627d7df006" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.112169 4899 scope.go:117] "RemoveContainer" containerID="53a4f93688d21f5c6046b271974cb5d25a846b7568aa88289f7c2058cef3569b" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.138514 4899 scope.go:117] "RemoveContainer" containerID="9c81baf9eded7c10c77302fc23702a09e72b2ad71b1be00d942c1962dfc41172" Oct 03 08:57:56 crc kubenswrapper[4899]: E1003 08:57:56.142506 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c81baf9eded7c10c77302fc23702a09e72b2ad71b1be00d942c1962dfc41172\": container with ID starting with 9c81baf9eded7c10c77302fc23702a09e72b2ad71b1be00d942c1962dfc41172 not found: ID does not exist" containerID="9c81baf9eded7c10c77302fc23702a09e72b2ad71b1be00d942c1962dfc41172" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.149174 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c81baf9eded7c10c77302fc23702a09e72b2ad71b1be00d942c1962dfc41172"} err="failed to get container status \"9c81baf9eded7c10c77302fc23702a09e72b2ad71b1be00d942c1962dfc41172\": rpc error: code = NotFound desc = could not find container \"9c81baf9eded7c10c77302fc23702a09e72b2ad71b1be00d942c1962dfc41172\": container with ID starting with 9c81baf9eded7c10c77302fc23702a09e72b2ad71b1be00d942c1962dfc41172 not found: ID does not exist" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.149225 4899 scope.go:117] "RemoveContainer" containerID="46e95043952bde80c56b270ad44ea769d364eb8008e5d9e5a9bfce089b93da98" Oct 03 08:57:56 crc kubenswrapper[4899]: E1003 08:57:56.149805 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46e95043952bde80c56b270ad44ea769d364eb8008e5d9e5a9bfce089b93da98\": container with ID starting with 46e95043952bde80c56b270ad44ea769d364eb8008e5d9e5a9bfce089b93da98 not found: ID does not 
exist" containerID="46e95043952bde80c56b270ad44ea769d364eb8008e5d9e5a9bfce089b93da98" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.149879 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46e95043952bde80c56b270ad44ea769d364eb8008e5d9e5a9bfce089b93da98"} err="failed to get container status \"46e95043952bde80c56b270ad44ea769d364eb8008e5d9e5a9bfce089b93da98\": rpc error: code = NotFound desc = could not find container \"46e95043952bde80c56b270ad44ea769d364eb8008e5d9e5a9bfce089b93da98\": container with ID starting with 46e95043952bde80c56b270ad44ea769d364eb8008e5d9e5a9bfce089b93da98 not found: ID does not exist" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.149949 4899 scope.go:117] "RemoveContainer" containerID="a6c9c35522a80c1f51546816b69e8f085bf9a3bc1c92213164d1bd627d7df006" Oct 03 08:57:56 crc kubenswrapper[4899]: E1003 08:57:56.151103 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6c9c35522a80c1f51546816b69e8f085bf9a3bc1c92213164d1bd627d7df006\": container with ID starting with a6c9c35522a80c1f51546816b69e8f085bf9a3bc1c92213164d1bd627d7df006 not found: ID does not exist" containerID="a6c9c35522a80c1f51546816b69e8f085bf9a3bc1c92213164d1bd627d7df006" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.151140 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6c9c35522a80c1f51546816b69e8f085bf9a3bc1c92213164d1bd627d7df006"} err="failed to get container status \"a6c9c35522a80c1f51546816b69e8f085bf9a3bc1c92213164d1bd627d7df006\": rpc error: code = NotFound desc = could not find container \"a6c9c35522a80c1f51546816b69e8f085bf9a3bc1c92213164d1bd627d7df006\": container with ID starting with a6c9c35522a80c1f51546816b69e8f085bf9a3bc1c92213164d1bd627d7df006 not found: ID does not exist" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.151159 4899 scope.go:117] "RemoveContainer" containerID="53a4f93688d21f5c6046b271974cb5d25a846b7568aa88289f7c2058cef3569b" Oct 03 08:57:56 crc kubenswrapper[4899]: E1003 08:57:56.151522 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53a4f93688d21f5c6046b271974cb5d25a846b7568aa88289f7c2058cef3569b\": container with ID starting with 53a4f93688d21f5c6046b271974cb5d25a846b7568aa88289f7c2058cef3569b not found: ID does not exist" containerID="53a4f93688d21f5c6046b271974cb5d25a846b7568aa88289f7c2058cef3569b" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.151546 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53a4f93688d21f5c6046b271974cb5d25a846b7568aa88289f7c2058cef3569b"} err="failed to get container status \"53a4f93688d21f5c6046b271974cb5d25a846b7568aa88289f7c2058cef3569b\": rpc error: code = NotFound desc = could not find container \"53a4f93688d21f5c6046b271974cb5d25a846b7568aa88289f7c2058cef3569b\": container with ID starting with 53a4f93688d21f5c6046b271974cb5d25a846b7568aa88289f7c2058cef3569b not found: ID does not exist" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.151558 4899 scope.go:117] "RemoveContainer" containerID="9c81baf9eded7c10c77302fc23702a09e72b2ad71b1be00d942c1962dfc41172" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.151764 4899 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"9c81baf9eded7c10c77302fc23702a09e72b2ad71b1be00d942c1962dfc41172"} err="failed to get container status \"9c81baf9eded7c10c77302fc23702a09e72b2ad71b1be00d942c1962dfc41172\": rpc error: code = NotFound desc = could not find container \"9c81baf9eded7c10c77302fc23702a09e72b2ad71b1be00d942c1962dfc41172\": container with ID starting with 9c81baf9eded7c10c77302fc23702a09e72b2ad71b1be00d942c1962dfc41172 not found: ID does not exist" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.151781 4899 scope.go:117] "RemoveContainer" containerID="46e95043952bde80c56b270ad44ea769d364eb8008e5d9e5a9bfce089b93da98" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.152498 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46e95043952bde80c56b270ad44ea769d364eb8008e5d9e5a9bfce089b93da98"} err="failed to get container status \"46e95043952bde80c56b270ad44ea769d364eb8008e5d9e5a9bfce089b93da98\": rpc error: code = NotFound desc = could not find container \"46e95043952bde80c56b270ad44ea769d364eb8008e5d9e5a9bfce089b93da98\": container with ID starting with 46e95043952bde80c56b270ad44ea769d364eb8008e5d9e5a9bfce089b93da98 not found: ID does not exist" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.155979 4899 scope.go:117] "RemoveContainer" containerID="a6c9c35522a80c1f51546816b69e8f085bf9a3bc1c92213164d1bd627d7df006" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.157062 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6c9c35522a80c1f51546816b69e8f085bf9a3bc1c92213164d1bd627d7df006"} err="failed to get container status \"a6c9c35522a80c1f51546816b69e8f085bf9a3bc1c92213164d1bd627d7df006\": rpc error: code = NotFound desc = could not find container \"a6c9c35522a80c1f51546816b69e8f085bf9a3bc1c92213164d1bd627d7df006\": container with ID starting with a6c9c35522a80c1f51546816b69e8f085bf9a3bc1c92213164d1bd627d7df006 not found: ID does not exist" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.157107 4899 scope.go:117] "RemoveContainer" containerID="53a4f93688d21f5c6046b271974cb5d25a846b7568aa88289f7c2058cef3569b" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.157769 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53a4f93688d21f5c6046b271974cb5d25a846b7568aa88289f7c2058cef3569b"} err="failed to get container status \"53a4f93688d21f5c6046b271974cb5d25a846b7568aa88289f7c2058cef3569b\": rpc error: code = NotFound desc = could not find container \"53a4f93688d21f5c6046b271974cb5d25a846b7568aa88289f7c2058cef3569b\": container with ID starting with 53a4f93688d21f5c6046b271974cb5d25a846b7568aa88289f7c2058cef3569b not found: ID does not exist" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.157817 4899 scope.go:117] "RemoveContainer" containerID="9c81baf9eded7c10c77302fc23702a09e72b2ad71b1be00d942c1962dfc41172" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.158159 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5bcb4b4796-x4jmr"] Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.158280 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c81baf9eded7c10c77302fc23702a09e72b2ad71b1be00d942c1962dfc41172"} err="failed to get container status \"9c81baf9eded7c10c77302fc23702a09e72b2ad71b1be00d942c1962dfc41172\": rpc error: code = NotFound desc = could not find container 
\"9c81baf9eded7c10c77302fc23702a09e72b2ad71b1be00d942c1962dfc41172\": container with ID starting with 9c81baf9eded7c10c77302fc23702a09e72b2ad71b1be00d942c1962dfc41172 not found: ID does not exist" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.158351 4899 scope.go:117] "RemoveContainer" containerID="46e95043952bde80c56b270ad44ea769d364eb8008e5d9e5a9bfce089b93da98" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.159075 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46e95043952bde80c56b270ad44ea769d364eb8008e5d9e5a9bfce089b93da98"} err="failed to get container status \"46e95043952bde80c56b270ad44ea769d364eb8008e5d9e5a9bfce089b93da98\": rpc error: code = NotFound desc = could not find container \"46e95043952bde80c56b270ad44ea769d364eb8008e5d9e5a9bfce089b93da98\": container with ID starting with 46e95043952bde80c56b270ad44ea769d364eb8008e5d9e5a9bfce089b93da98 not found: ID does not exist" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.159108 4899 scope.go:117] "RemoveContainer" containerID="a6c9c35522a80c1f51546816b69e8f085bf9a3bc1c92213164d1bd627d7df006" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.159549 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6c9c35522a80c1f51546816b69e8f085bf9a3bc1c92213164d1bd627d7df006"} err="failed to get container status \"a6c9c35522a80c1f51546816b69e8f085bf9a3bc1c92213164d1bd627d7df006\": rpc error: code = NotFound desc = could not find container \"a6c9c35522a80c1f51546816b69e8f085bf9a3bc1c92213164d1bd627d7df006\": container with ID starting with a6c9c35522a80c1f51546816b69e8f085bf9a3bc1c92213164d1bd627d7df006 not found: ID does not exist" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.159624 4899 scope.go:117] "RemoveContainer" containerID="53a4f93688d21f5c6046b271974cb5d25a846b7568aa88289f7c2058cef3569b" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.160213 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53a4f93688d21f5c6046b271974cb5d25a846b7568aa88289f7c2058cef3569b"} err="failed to get container status \"53a4f93688d21f5c6046b271974cb5d25a846b7568aa88289f7c2058cef3569b\": rpc error: code = NotFound desc = could not find container \"53a4f93688d21f5c6046b271974cb5d25a846b7568aa88289f7c2058cef3569b\": container with ID starting with 53a4f93688d21f5c6046b271974cb5d25a846b7568aa88289f7c2058cef3569b not found: ID does not exist" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.160249 4899 scope.go:117] "RemoveContainer" containerID="9c81baf9eded7c10c77302fc23702a09e72b2ad71b1be00d942c1962dfc41172" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.160532 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c81baf9eded7c10c77302fc23702a09e72b2ad71b1be00d942c1962dfc41172"} err="failed to get container status \"9c81baf9eded7c10c77302fc23702a09e72b2ad71b1be00d942c1962dfc41172\": rpc error: code = NotFound desc = could not find container \"9c81baf9eded7c10c77302fc23702a09e72b2ad71b1be00d942c1962dfc41172\": container with ID starting with 9c81baf9eded7c10c77302fc23702a09e72b2ad71b1be00d942c1962dfc41172 not found: ID does not exist" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.160555 4899 scope.go:117] "RemoveContainer" containerID="46e95043952bde80c56b270ad44ea769d364eb8008e5d9e5a9bfce089b93da98" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.160779 4899 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46e95043952bde80c56b270ad44ea769d364eb8008e5d9e5a9bfce089b93da98"} err="failed to get container status \"46e95043952bde80c56b270ad44ea769d364eb8008e5d9e5a9bfce089b93da98\": rpc error: code = NotFound desc = could not find container \"46e95043952bde80c56b270ad44ea769d364eb8008e5d9e5a9bfce089b93da98\": container with ID starting with 46e95043952bde80c56b270ad44ea769d364eb8008e5d9e5a9bfce089b93da98 not found: ID does not exist" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.160805 4899 scope.go:117] "RemoveContainer" containerID="a6c9c35522a80c1f51546816b69e8f085bf9a3bc1c92213164d1bd627d7df006" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.161013 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6c9c35522a80c1f51546816b69e8f085bf9a3bc1c92213164d1bd627d7df006"} err="failed to get container status \"a6c9c35522a80c1f51546816b69e8f085bf9a3bc1c92213164d1bd627d7df006\": rpc error: code = NotFound desc = could not find container \"a6c9c35522a80c1f51546816b69e8f085bf9a3bc1c92213164d1bd627d7df006\": container with ID starting with a6c9c35522a80c1f51546816b69e8f085bf9a3bc1c92213164d1bd627d7df006 not found: ID does not exist" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.161041 4899 scope.go:117] "RemoveContainer" containerID="53a4f93688d21f5c6046b271974cb5d25a846b7568aa88289f7c2058cef3569b" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.161454 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53a4f93688d21f5c6046b271974cb5d25a846b7568aa88289f7c2058cef3569b"} err="failed to get container status \"53a4f93688d21f5c6046b271974cb5d25a846b7568aa88289f7c2058cef3569b\": rpc error: code = NotFound desc = could not find container \"53a4f93688d21f5c6046b271974cb5d25a846b7568aa88289f7c2058cef3569b\": container with ID starting with 53a4f93688d21f5c6046b271974cb5d25a846b7568aa88289f7c2058cef3569b not found: ID does not exist" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.171949 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-5bcb4b4796-x4jmr"] Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.183655 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.207349 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.229438 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:57:56 crc kubenswrapper[4899]: E1003 08:57:56.229918 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2eba2a41-6a16-4f77-a699-157fb6fa7b3f" containerName="horizon" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.229936 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="2eba2a41-6a16-4f77-a699-157fb6fa7b3f" containerName="horizon" Oct 03 08:57:56 crc kubenswrapper[4899]: E1003 08:57:56.229948 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7b505b6-c3ce-4052-b99a-1c548bc69df6" containerName="ceilometer-central-agent" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.229958 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7b505b6-c3ce-4052-b99a-1c548bc69df6" containerName="ceilometer-central-agent" Oct 03 08:57:56 crc kubenswrapper[4899]: E1003 08:57:56.229989 4899 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="a7b505b6-c3ce-4052-b99a-1c548bc69df6" containerName="sg-core" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.229996 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7b505b6-c3ce-4052-b99a-1c548bc69df6" containerName="sg-core" Oct 03 08:57:56 crc kubenswrapper[4899]: E1003 08:57:56.230018 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7b505b6-c3ce-4052-b99a-1c548bc69df6" containerName="ceilometer-notification-agent" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.230025 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7b505b6-c3ce-4052-b99a-1c548bc69df6" containerName="ceilometer-notification-agent" Oct 03 08:57:56 crc kubenswrapper[4899]: E1003 08:57:56.230038 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7b505b6-c3ce-4052-b99a-1c548bc69df6" containerName="proxy-httpd" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.230045 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7b505b6-c3ce-4052-b99a-1c548bc69df6" containerName="proxy-httpd" Oct 03 08:57:56 crc kubenswrapper[4899]: E1003 08:57:56.230055 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2eba2a41-6a16-4f77-a699-157fb6fa7b3f" containerName="horizon-log" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.230062 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="2eba2a41-6a16-4f77-a699-157fb6fa7b3f" containerName="horizon-log" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.230258 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7b505b6-c3ce-4052-b99a-1c548bc69df6" containerName="proxy-httpd" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.230277 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7b505b6-c3ce-4052-b99a-1c548bc69df6" containerName="sg-core" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.230293 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="2eba2a41-6a16-4f77-a699-157fb6fa7b3f" containerName="horizon-log" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.230303 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7b505b6-c3ce-4052-b99a-1c548bc69df6" containerName="ceilometer-central-agent" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.230318 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7b505b6-c3ce-4052-b99a-1c548bc69df6" containerName="ceilometer-notification-agent" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.230336 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="2eba2a41-6a16-4f77-a699-157fb6fa7b3f" containerName="horizon" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.232292 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.235532 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.235743 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.238652 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.392980 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f18fe07f-71a4-447a-a596-edd9aa1b281d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f18fe07f-71a4-447a-a596-edd9aa1b281d\") " pod="openstack/ceilometer-0" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.393039 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f18fe07f-71a4-447a-a596-edd9aa1b281d-run-httpd\") pod \"ceilometer-0\" (UID: \"f18fe07f-71a4-447a-a596-edd9aa1b281d\") " pod="openstack/ceilometer-0" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.393275 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f18fe07f-71a4-447a-a596-edd9aa1b281d-config-data\") pod \"ceilometer-0\" (UID: \"f18fe07f-71a4-447a-a596-edd9aa1b281d\") " pod="openstack/ceilometer-0" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.393348 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f18fe07f-71a4-447a-a596-edd9aa1b281d-log-httpd\") pod \"ceilometer-0\" (UID: \"f18fe07f-71a4-447a-a596-edd9aa1b281d\") " pod="openstack/ceilometer-0" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.393414 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f18fe07f-71a4-447a-a596-edd9aa1b281d-scripts\") pod \"ceilometer-0\" (UID: \"f18fe07f-71a4-447a-a596-edd9aa1b281d\") " pod="openstack/ceilometer-0" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.393474 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f18fe07f-71a4-447a-a596-edd9aa1b281d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f18fe07f-71a4-447a-a596-edd9aa1b281d\") " pod="openstack/ceilometer-0" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.393519 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5srsx\" (UniqueName: \"kubernetes.io/projected/f18fe07f-71a4-447a-a596-edd9aa1b281d-kube-api-access-5srsx\") pod \"ceilometer-0\" (UID: \"f18fe07f-71a4-447a-a596-edd9aa1b281d\") " pod="openstack/ceilometer-0" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.494914 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f18fe07f-71a4-447a-a596-edd9aa1b281d-scripts\") pod \"ceilometer-0\" (UID: \"f18fe07f-71a4-447a-a596-edd9aa1b281d\") " pod="openstack/ceilometer-0" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.494978 4899 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f18fe07f-71a4-447a-a596-edd9aa1b281d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f18fe07f-71a4-447a-a596-edd9aa1b281d\") " pod="openstack/ceilometer-0" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.495014 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5srsx\" (UniqueName: \"kubernetes.io/projected/f18fe07f-71a4-447a-a596-edd9aa1b281d-kube-api-access-5srsx\") pod \"ceilometer-0\" (UID: \"f18fe07f-71a4-447a-a596-edd9aa1b281d\") " pod="openstack/ceilometer-0" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.495109 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f18fe07f-71a4-447a-a596-edd9aa1b281d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f18fe07f-71a4-447a-a596-edd9aa1b281d\") " pod="openstack/ceilometer-0" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.495148 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f18fe07f-71a4-447a-a596-edd9aa1b281d-run-httpd\") pod \"ceilometer-0\" (UID: \"f18fe07f-71a4-447a-a596-edd9aa1b281d\") " pod="openstack/ceilometer-0" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.495221 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f18fe07f-71a4-447a-a596-edd9aa1b281d-config-data\") pod \"ceilometer-0\" (UID: \"f18fe07f-71a4-447a-a596-edd9aa1b281d\") " pod="openstack/ceilometer-0" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.495251 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f18fe07f-71a4-447a-a596-edd9aa1b281d-log-httpd\") pod \"ceilometer-0\" (UID: \"f18fe07f-71a4-447a-a596-edd9aa1b281d\") " pod="openstack/ceilometer-0" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.495716 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f18fe07f-71a4-447a-a596-edd9aa1b281d-log-httpd\") pod \"ceilometer-0\" (UID: \"f18fe07f-71a4-447a-a596-edd9aa1b281d\") " pod="openstack/ceilometer-0" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.495935 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f18fe07f-71a4-447a-a596-edd9aa1b281d-run-httpd\") pod \"ceilometer-0\" (UID: \"f18fe07f-71a4-447a-a596-edd9aa1b281d\") " pod="openstack/ceilometer-0" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.499729 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f18fe07f-71a4-447a-a596-edd9aa1b281d-scripts\") pod \"ceilometer-0\" (UID: \"f18fe07f-71a4-447a-a596-edd9aa1b281d\") " pod="openstack/ceilometer-0" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.499738 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f18fe07f-71a4-447a-a596-edd9aa1b281d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f18fe07f-71a4-447a-a596-edd9aa1b281d\") " pod="openstack/ceilometer-0" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.502512 4899 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f18fe07f-71a4-447a-a596-edd9aa1b281d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f18fe07f-71a4-447a-a596-edd9aa1b281d\") " pod="openstack/ceilometer-0" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.502527 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f18fe07f-71a4-447a-a596-edd9aa1b281d-config-data\") pod \"ceilometer-0\" (UID: \"f18fe07f-71a4-447a-a596-edd9aa1b281d\") " pod="openstack/ceilometer-0" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.515782 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5srsx\" (UniqueName: \"kubernetes.io/projected/f18fe07f-71a4-447a-a596-edd9aa1b281d-kube-api-access-5srsx\") pod \"ceilometer-0\" (UID: \"f18fe07f-71a4-447a-a596-edd9aa1b281d\") " pod="openstack/ceilometer-0" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.538491 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2eba2a41-6a16-4f77-a699-157fb6fa7b3f" path="/var/lib/kubelet/pods/2eba2a41-6a16-4f77-a699-157fb6fa7b3f/volumes" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.539327 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7b505b6-c3ce-4052-b99a-1c548bc69df6" path="/var/lib/kubelet/pods/a7b505b6-c3ce-4052-b99a-1c548bc69df6/volumes" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.548247 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:57:56 crc kubenswrapper[4899]: I1003 08:57:56.996446 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:57:57 crc kubenswrapper[4899]: I1003 08:57:57.846305 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f18fe07f-71a4-447a-a596-edd9aa1b281d","Type":"ContainerStarted","Data":"dad7d517c2ac4fc5d9d06252237fb209e3dabac64f2eff636ef2e68b2ccaa567"} Oct 03 08:57:57 crc kubenswrapper[4899]: I1003 08:57:57.846871 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f18fe07f-71a4-447a-a596-edd9aa1b281d","Type":"ContainerStarted","Data":"68881ea014b23ed5a2000a93492fb85ad41b6d7a5a32e617c8382603802aa946"} Oct 03 08:57:58 crc kubenswrapper[4899]: I1003 08:57:58.859362 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f18fe07f-71a4-447a-a596-edd9aa1b281d","Type":"ContainerStarted","Data":"3d11bc7d73bf6c42fe41f9281afdeea816715b6c24915e1ce0159987413a0162"} Oct 03 08:57:59 crc kubenswrapper[4899]: I1003 08:57:59.871007 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f18fe07f-71a4-447a-a596-edd9aa1b281d","Type":"ContainerStarted","Data":"2f64c437061942d5f568aee839894ef481892a2e1f6f3f5f0cfca3e9cfe36157"} Oct 03 08:58:00 crc kubenswrapper[4899]: I1003 08:58:00.378273 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 03 08:58:00 crc kubenswrapper[4899]: I1003 08:58:00.378656 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 03 08:58:00 crc kubenswrapper[4899]: I1003 08:58:00.416398 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 03 08:58:00 crc kubenswrapper[4899]: I1003 
08:58:00.432303 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 03 08:58:00 crc kubenswrapper[4899]: I1003 08:58:00.884046 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f18fe07f-71a4-447a-a596-edd9aa1b281d","Type":"ContainerStarted","Data":"3b9ff524633fb490a541ec9780a4aaff9d9a6f5daa238a18757a7987640f510d"} Oct 03 08:58:00 crc kubenswrapper[4899]: I1003 08:58:00.885191 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 03 08:58:00 crc kubenswrapper[4899]: I1003 08:58:00.885212 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 03 08:58:00 crc kubenswrapper[4899]: I1003 08:58:00.912240 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.700860635 podStartE2EDuration="4.912221894s" podCreationTimestamp="2025-10-03 08:57:56 +0000 UTC" firstStartedPulling="2025-10-03 08:57:56.996200854 +0000 UTC m=+1051.103685807" lastFinishedPulling="2025-10-03 08:58:00.207562113 +0000 UTC m=+1054.315047066" observedRunningTime="2025-10-03 08:58:00.908853767 +0000 UTC m=+1055.016338730" watchObservedRunningTime="2025-10-03 08:58:00.912221894 +0000 UTC m=+1055.019706847" Oct 03 08:58:01 crc kubenswrapper[4899]: I1003 08:58:01.171046 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 03 08:58:01 crc kubenswrapper[4899]: I1003 08:58:01.171099 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 03 08:58:01 crc kubenswrapper[4899]: I1003 08:58:01.212666 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 03 08:58:01 crc kubenswrapper[4899]: I1003 08:58:01.227880 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 03 08:58:01 crc kubenswrapper[4899]: I1003 08:58:01.435836 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-1337-account-create-6dt5c"] Oct 03 08:58:01 crc kubenswrapper[4899]: I1003 08:58:01.454721 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-1337-account-create-6dt5c" Oct 03 08:58:01 crc kubenswrapper[4899]: I1003 08:58:01.458214 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Oct 03 08:58:01 crc kubenswrapper[4899]: I1003 08:58:01.475655 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-1337-account-create-6dt5c"] Oct 03 08:58:01 crc kubenswrapper[4899]: I1003 08:58:01.595830 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dct6w\" (UniqueName: \"kubernetes.io/projected/85a158ee-9e94-4664-b795-9fffb1fd2674-kube-api-access-dct6w\") pod \"nova-api-1337-account-create-6dt5c\" (UID: \"85a158ee-9e94-4664-b795-9fffb1fd2674\") " pod="openstack/nova-api-1337-account-create-6dt5c" Oct 03 08:58:01 crc kubenswrapper[4899]: I1003 08:58:01.624954 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-994c-account-create-6qslq"] Oct 03 08:58:01 crc kubenswrapper[4899]: I1003 08:58:01.626787 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-994c-account-create-6qslq" Oct 03 08:58:01 crc kubenswrapper[4899]: I1003 08:58:01.629544 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Oct 03 08:58:01 crc kubenswrapper[4899]: I1003 08:58:01.634446 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-994c-account-create-6qslq"] Oct 03 08:58:01 crc kubenswrapper[4899]: I1003 08:58:01.699667 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dct6w\" (UniqueName: \"kubernetes.io/projected/85a158ee-9e94-4664-b795-9fffb1fd2674-kube-api-access-dct6w\") pod \"nova-api-1337-account-create-6dt5c\" (UID: \"85a158ee-9e94-4664-b795-9fffb1fd2674\") " pod="openstack/nova-api-1337-account-create-6dt5c" Oct 03 08:58:01 crc kubenswrapper[4899]: I1003 08:58:01.723651 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dct6w\" (UniqueName: \"kubernetes.io/projected/85a158ee-9e94-4664-b795-9fffb1fd2674-kube-api-access-dct6w\") pod \"nova-api-1337-account-create-6dt5c\" (UID: \"85a158ee-9e94-4664-b795-9fffb1fd2674\") " pod="openstack/nova-api-1337-account-create-6dt5c" Oct 03 08:58:01 crc kubenswrapper[4899]: I1003 08:58:01.789526 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-1337-account-create-6dt5c" Oct 03 08:58:01 crc kubenswrapper[4899]: I1003 08:58:01.801511 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p554s\" (UniqueName: \"kubernetes.io/projected/7bd3bb54-4225-4123-b1e2-05bf17011397-kube-api-access-p554s\") pod \"nova-cell0-994c-account-create-6qslq\" (UID: \"7bd3bb54-4225-4123-b1e2-05bf17011397\") " pod="openstack/nova-cell0-994c-account-create-6qslq" Oct 03 08:58:01 crc kubenswrapper[4899]: I1003 08:58:01.835228 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-4370-account-create-zqsn8"] Oct 03 08:58:01 crc kubenswrapper[4899]: I1003 08:58:01.836485 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-4370-account-create-zqsn8" Oct 03 08:58:01 crc kubenswrapper[4899]: I1003 08:58:01.845608 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Oct 03 08:58:01 crc kubenswrapper[4899]: I1003 08:58:01.850507 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-4370-account-create-zqsn8"] Oct 03 08:58:01 crc kubenswrapper[4899]: I1003 08:58:01.898596 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 03 08:58:01 crc kubenswrapper[4899]: I1003 08:58:01.900263 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 03 08:58:01 crc kubenswrapper[4899]: I1003 08:58:01.900558 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 03 08:58:01 crc kubenswrapper[4899]: I1003 08:58:01.906732 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p554s\" (UniqueName: \"kubernetes.io/projected/7bd3bb54-4225-4123-b1e2-05bf17011397-kube-api-access-p554s\") pod \"nova-cell0-994c-account-create-6qslq\" (UID: \"7bd3bb54-4225-4123-b1e2-05bf17011397\") " pod="openstack/nova-cell0-994c-account-create-6qslq" Oct 03 08:58:01 crc kubenswrapper[4899]: I1003 08:58:01.929963 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p554s\" (UniqueName: \"kubernetes.io/projected/7bd3bb54-4225-4123-b1e2-05bf17011397-kube-api-access-p554s\") pod \"nova-cell0-994c-account-create-6qslq\" (UID: \"7bd3bb54-4225-4123-b1e2-05bf17011397\") " pod="openstack/nova-cell0-994c-account-create-6qslq" Oct 03 08:58:01 crc kubenswrapper[4899]: I1003 08:58:01.941641 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-994c-account-create-6qslq" Oct 03 08:58:02 crc kubenswrapper[4899]: I1003 08:58:02.009347 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lk5sx\" (UniqueName: \"kubernetes.io/projected/e3f0c5eb-8c4d-42bf-9cb2-5ed03cfa5784-kube-api-access-lk5sx\") pod \"nova-cell1-4370-account-create-zqsn8\" (UID: \"e3f0c5eb-8c4d-42bf-9cb2-5ed03cfa5784\") " pod="openstack/nova-cell1-4370-account-create-zqsn8" Oct 03 08:58:02 crc kubenswrapper[4899]: I1003 08:58:02.113959 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lk5sx\" (UniqueName: \"kubernetes.io/projected/e3f0c5eb-8c4d-42bf-9cb2-5ed03cfa5784-kube-api-access-lk5sx\") pod \"nova-cell1-4370-account-create-zqsn8\" (UID: \"e3f0c5eb-8c4d-42bf-9cb2-5ed03cfa5784\") " pod="openstack/nova-cell1-4370-account-create-zqsn8" Oct 03 08:58:02 crc kubenswrapper[4899]: I1003 08:58:02.143725 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lk5sx\" (UniqueName: \"kubernetes.io/projected/e3f0c5eb-8c4d-42bf-9cb2-5ed03cfa5784-kube-api-access-lk5sx\") pod \"nova-cell1-4370-account-create-zqsn8\" (UID: \"e3f0c5eb-8c4d-42bf-9cb2-5ed03cfa5784\") " pod="openstack/nova-cell1-4370-account-create-zqsn8" Oct 03 08:58:02 crc kubenswrapper[4899]: W1003 08:58:02.334371 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85a158ee_9e94_4664_b795_9fffb1fd2674.slice/crio-7c4448ba977fb79728e7e356c26f446bc3a9b268d4127d05dbfe2bbf900a6cca WatchSource:0}: Error finding container 7c4448ba977fb79728e7e356c26f446bc3a9b268d4127d05dbfe2bbf900a6cca: Status 404 returned error can't find the container with id 7c4448ba977fb79728e7e356c26f446bc3a9b268d4127d05dbfe2bbf900a6cca Oct 03 08:58:02 crc kubenswrapper[4899]: I1003 08:58:02.340416 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-1337-account-create-6dt5c"] Oct 03 08:58:02 crc kubenswrapper[4899]: I1003 08:58:02.353686 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-4370-account-create-zqsn8" Oct 03 08:58:02 crc kubenswrapper[4899]: I1003 08:58:02.480869 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-994c-account-create-6qslq"] Oct 03 08:58:02 crc kubenswrapper[4899]: I1003 08:58:02.808941 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-4370-account-create-zqsn8"] Oct 03 08:58:02 crc kubenswrapper[4899]: W1003 08:58:02.814639 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode3f0c5eb_8c4d_42bf_9cb2_5ed03cfa5784.slice/crio-8d35bdfc3426e4f5aa5254430576f7d21505a038f2d3e588a7df81921c286bba WatchSource:0}: Error finding container 8d35bdfc3426e4f5aa5254430576f7d21505a038f2d3e588a7df81921c286bba: Status 404 returned error can't find the container with id 8d35bdfc3426e4f5aa5254430576f7d21505a038f2d3e588a7df81921c286bba Oct 03 08:58:02 crc kubenswrapper[4899]: I1003 08:58:02.915496 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-4370-account-create-zqsn8" event={"ID":"e3f0c5eb-8c4d-42bf-9cb2-5ed03cfa5784","Type":"ContainerStarted","Data":"8d35bdfc3426e4f5aa5254430576f7d21505a038f2d3e588a7df81921c286bba"} Oct 03 08:58:02 crc kubenswrapper[4899]: I1003 08:58:02.929180 4899 generic.go:334] "Generic (PLEG): container finished" podID="7bd3bb54-4225-4123-b1e2-05bf17011397" containerID="df171ed54292357689c64ee4e5a2db694b9aff4216b1e0b2186b98ed5fd833c9" exitCode=0 Oct 03 08:58:02 crc kubenswrapper[4899]: I1003 08:58:02.929281 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-994c-account-create-6qslq" event={"ID":"7bd3bb54-4225-4123-b1e2-05bf17011397","Type":"ContainerDied","Data":"df171ed54292357689c64ee4e5a2db694b9aff4216b1e0b2186b98ed5fd833c9"} Oct 03 08:58:02 crc kubenswrapper[4899]: I1003 08:58:02.929313 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-994c-account-create-6qslq" event={"ID":"7bd3bb54-4225-4123-b1e2-05bf17011397","Type":"ContainerStarted","Data":"a180093d6aafcee948b596424d40888b1c1faf940754ef16f8603a5c3058c87b"} Oct 03 08:58:02 crc kubenswrapper[4899]: I1003 08:58:02.938457 4899 generic.go:334] "Generic (PLEG): container finished" podID="85a158ee-9e94-4664-b795-9fffb1fd2674" containerID="b28d0c718bf862515c9c5ab48a1ff617b5e50c09cc4795bdedbe0c04b1182fe9" exitCode=0 Oct 03 08:58:02 crc kubenswrapper[4899]: I1003 08:58:02.938545 4899 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 08:58:02 crc kubenswrapper[4899]: I1003 08:58:02.938553 4899 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 08:58:02 crc kubenswrapper[4899]: I1003 08:58:02.939376 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1337-account-create-6dt5c" event={"ID":"85a158ee-9e94-4664-b795-9fffb1fd2674","Type":"ContainerDied","Data":"b28d0c718bf862515c9c5ab48a1ff617b5e50c09cc4795bdedbe0c04b1182fe9"} Oct 03 08:58:02 crc kubenswrapper[4899]: I1003 08:58:02.939406 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1337-account-create-6dt5c" event={"ID":"85a158ee-9e94-4664-b795-9fffb1fd2674","Type":"ContainerStarted","Data":"7c4448ba977fb79728e7e356c26f446bc3a9b268d4127d05dbfe2bbf900a6cca"} Oct 03 08:58:03 crc kubenswrapper[4899]: I1003 08:58:03.142248 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 03 
08:58:03 crc kubenswrapper[4899]: I1003 08:58:03.144660 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 03 08:58:03 crc kubenswrapper[4899]: I1003 08:58:03.948718 4899 generic.go:334] "Generic (PLEG): container finished" podID="e3f0c5eb-8c4d-42bf-9cb2-5ed03cfa5784" containerID="a2c7047cd3233b03f2538a69a9a22242e27ea5ba9071c94e787d7e56d64d942f" exitCode=0 Oct 03 08:58:03 crc kubenswrapper[4899]: I1003 08:58:03.948797 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-4370-account-create-zqsn8" event={"ID":"e3f0c5eb-8c4d-42bf-9cb2-5ed03cfa5784","Type":"ContainerDied","Data":"a2c7047cd3233b03f2538a69a9a22242e27ea5ba9071c94e787d7e56d64d942f"} Oct 03 08:58:03 crc kubenswrapper[4899]: I1003 08:58:03.948961 4899 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 08:58:03 crc kubenswrapper[4899]: I1003 08:58:03.948975 4899 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 08:58:04 crc kubenswrapper[4899]: I1003 08:58:04.089644 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 03 08:58:04 crc kubenswrapper[4899]: I1003 08:58:04.093774 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 03 08:58:04 crc kubenswrapper[4899]: I1003 08:58:04.491610 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-1337-account-create-6dt5c" Oct 03 08:58:04 crc kubenswrapper[4899]: I1003 08:58:04.514061 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-994c-account-create-6qslq" Oct 03 08:58:04 crc kubenswrapper[4899]: I1003 08:58:04.584424 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dct6w\" (UniqueName: \"kubernetes.io/projected/85a158ee-9e94-4664-b795-9fffb1fd2674-kube-api-access-dct6w\") pod \"85a158ee-9e94-4664-b795-9fffb1fd2674\" (UID: \"85a158ee-9e94-4664-b795-9fffb1fd2674\") " Oct 03 08:58:04 crc kubenswrapper[4899]: I1003 08:58:04.584531 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p554s\" (UniqueName: \"kubernetes.io/projected/7bd3bb54-4225-4123-b1e2-05bf17011397-kube-api-access-p554s\") pod \"7bd3bb54-4225-4123-b1e2-05bf17011397\" (UID: \"7bd3bb54-4225-4123-b1e2-05bf17011397\") " Oct 03 08:58:04 crc kubenswrapper[4899]: I1003 08:58:04.610886 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85a158ee-9e94-4664-b795-9fffb1fd2674-kube-api-access-dct6w" (OuterVolumeSpecName: "kube-api-access-dct6w") pod "85a158ee-9e94-4664-b795-9fffb1fd2674" (UID: "85a158ee-9e94-4664-b795-9fffb1fd2674"). InnerVolumeSpecName "kube-api-access-dct6w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:58:04 crc kubenswrapper[4899]: I1003 08:58:04.612631 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bd3bb54-4225-4123-b1e2-05bf17011397-kube-api-access-p554s" (OuterVolumeSpecName: "kube-api-access-p554s") pod "7bd3bb54-4225-4123-b1e2-05bf17011397" (UID: "7bd3bb54-4225-4123-b1e2-05bf17011397"). InnerVolumeSpecName "kube-api-access-p554s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:58:04 crc kubenswrapper[4899]: I1003 08:58:04.687445 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dct6w\" (UniqueName: \"kubernetes.io/projected/85a158ee-9e94-4664-b795-9fffb1fd2674-kube-api-access-dct6w\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:04 crc kubenswrapper[4899]: I1003 08:58:04.687483 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p554s\" (UniqueName: \"kubernetes.io/projected/7bd3bb54-4225-4123-b1e2-05bf17011397-kube-api-access-p554s\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:04 crc kubenswrapper[4899]: I1003 08:58:04.957247 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1337-account-create-6dt5c" event={"ID":"85a158ee-9e94-4664-b795-9fffb1fd2674","Type":"ContainerDied","Data":"7c4448ba977fb79728e7e356c26f446bc3a9b268d4127d05dbfe2bbf900a6cca"} Oct 03 08:58:04 crc kubenswrapper[4899]: I1003 08:58:04.957282 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c4448ba977fb79728e7e356c26f446bc3a9b268d4127d05dbfe2bbf900a6cca" Oct 03 08:58:04 crc kubenswrapper[4899]: I1003 08:58:04.957345 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-1337-account-create-6dt5c" Oct 03 08:58:04 crc kubenswrapper[4899]: I1003 08:58:04.967706 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-994c-account-create-6qslq" event={"ID":"7bd3bb54-4225-4123-b1e2-05bf17011397","Type":"ContainerDied","Data":"a180093d6aafcee948b596424d40888b1c1faf940754ef16f8603a5c3058c87b"} Oct 03 08:58:04 crc kubenswrapper[4899]: I1003 08:58:04.967750 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a180093d6aafcee948b596424d40888b1c1faf940754ef16f8603a5c3058c87b" Oct 03 08:58:04 crc kubenswrapper[4899]: I1003 08:58:04.968488 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-994c-account-create-6qslq" Oct 03 08:58:05 crc kubenswrapper[4899]: I1003 08:58:05.292323 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-4370-account-create-zqsn8" Oct 03 08:58:05 crc kubenswrapper[4899]: I1003 08:58:05.398219 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lk5sx\" (UniqueName: \"kubernetes.io/projected/e3f0c5eb-8c4d-42bf-9cb2-5ed03cfa5784-kube-api-access-lk5sx\") pod \"e3f0c5eb-8c4d-42bf-9cb2-5ed03cfa5784\" (UID: \"e3f0c5eb-8c4d-42bf-9cb2-5ed03cfa5784\") " Oct 03 08:58:05 crc kubenswrapper[4899]: I1003 08:58:05.410696 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3f0c5eb-8c4d-42bf-9cb2-5ed03cfa5784-kube-api-access-lk5sx" (OuterVolumeSpecName: "kube-api-access-lk5sx") pod "e3f0c5eb-8c4d-42bf-9cb2-5ed03cfa5784" (UID: "e3f0c5eb-8c4d-42bf-9cb2-5ed03cfa5784"). InnerVolumeSpecName "kube-api-access-lk5sx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:58:05 crc kubenswrapper[4899]: I1003 08:58:05.500538 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lk5sx\" (UniqueName: \"kubernetes.io/projected/e3f0c5eb-8c4d-42bf-9cb2-5ed03cfa5784-kube-api-access-lk5sx\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:05 crc kubenswrapper[4899]: I1003 08:58:05.736583 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:58:05 crc kubenswrapper[4899]: I1003 08:58:05.736996 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f18fe07f-71a4-447a-a596-edd9aa1b281d" containerName="sg-core" containerID="cri-o://2f64c437061942d5f568aee839894ef481892a2e1f6f3f5f0cfca3e9cfe36157" gracePeriod=30 Oct 03 08:58:05 crc kubenswrapper[4899]: I1003 08:58:05.737143 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f18fe07f-71a4-447a-a596-edd9aa1b281d" containerName="proxy-httpd" containerID="cri-o://3b9ff524633fb490a541ec9780a4aaff9d9a6f5daa238a18757a7987640f510d" gracePeriod=30 Oct 03 08:58:05 crc kubenswrapper[4899]: I1003 08:58:05.737441 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f18fe07f-71a4-447a-a596-edd9aa1b281d" containerName="ceilometer-notification-agent" containerID="cri-o://3d11bc7d73bf6c42fe41f9281afdeea816715b6c24915e1ce0159987413a0162" gracePeriod=30 Oct 03 08:58:05 crc kubenswrapper[4899]: I1003 08:58:05.737762 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f18fe07f-71a4-447a-a596-edd9aa1b281d" containerName="ceilometer-central-agent" containerID="cri-o://dad7d517c2ac4fc5d9d06252237fb209e3dabac64f2eff636ef2e68b2ccaa567" gracePeriod=30 Oct 03 08:58:05 crc kubenswrapper[4899]: I1003 08:58:05.985425 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-4370-account-create-zqsn8" event={"ID":"e3f0c5eb-8c4d-42bf-9cb2-5ed03cfa5784","Type":"ContainerDied","Data":"8d35bdfc3426e4f5aa5254430576f7d21505a038f2d3e588a7df81921c286bba"} Oct 03 08:58:05 crc kubenswrapper[4899]: I1003 08:58:05.985462 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8d35bdfc3426e4f5aa5254430576f7d21505a038f2d3e588a7df81921c286bba" Oct 03 08:58:05 crc kubenswrapper[4899]: I1003 08:58:05.985515 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-4370-account-create-zqsn8" Oct 03 08:58:05 crc kubenswrapper[4899]: I1003 08:58:05.989024 4899 generic.go:334] "Generic (PLEG): container finished" podID="f18fe07f-71a4-447a-a596-edd9aa1b281d" containerID="3b9ff524633fb490a541ec9780a4aaff9d9a6f5daa238a18757a7987640f510d" exitCode=0 Oct 03 08:58:05 crc kubenswrapper[4899]: I1003 08:58:05.989054 4899 generic.go:334] "Generic (PLEG): container finished" podID="f18fe07f-71a4-447a-a596-edd9aa1b281d" containerID="2f64c437061942d5f568aee839894ef481892a2e1f6f3f5f0cfca3e9cfe36157" exitCode=2 Oct 03 08:58:05 crc kubenswrapper[4899]: I1003 08:58:05.990036 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f18fe07f-71a4-447a-a596-edd9aa1b281d","Type":"ContainerDied","Data":"3b9ff524633fb490a541ec9780a4aaff9d9a6f5daa238a18757a7987640f510d"} Oct 03 08:58:05 crc kubenswrapper[4899]: I1003 08:58:05.990068 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f18fe07f-71a4-447a-a596-edd9aa1b281d","Type":"ContainerDied","Data":"2f64c437061942d5f568aee839894ef481892a2e1f6f3f5f0cfca3e9cfe36157"} Oct 03 08:58:05 crc kubenswrapper[4899]: E1003 08:58:05.996978 4899 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf18fe07f_71a4_447a_a596_edd9aa1b281d.slice/crio-2f64c437061942d5f568aee839894ef481892a2e1f6f3f5f0cfca3e9cfe36157.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf18fe07f_71a4_447a_a596_edd9aa1b281d.slice/crio-conmon-2f64c437061942d5f568aee839894ef481892a2e1f6f3f5f0cfca3e9cfe36157.scope\": RecentStats: unable to find data in memory cache]" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.520696 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.617675 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f18fe07f-71a4-447a-a596-edd9aa1b281d-config-data\") pod \"f18fe07f-71a4-447a-a596-edd9aa1b281d\" (UID: \"f18fe07f-71a4-447a-a596-edd9aa1b281d\") " Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.617727 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f18fe07f-71a4-447a-a596-edd9aa1b281d-log-httpd\") pod \"f18fe07f-71a4-447a-a596-edd9aa1b281d\" (UID: \"f18fe07f-71a4-447a-a596-edd9aa1b281d\") " Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.617752 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f18fe07f-71a4-447a-a596-edd9aa1b281d-run-httpd\") pod \"f18fe07f-71a4-447a-a596-edd9aa1b281d\" (UID: \"f18fe07f-71a4-447a-a596-edd9aa1b281d\") " Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.617792 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5srsx\" (UniqueName: \"kubernetes.io/projected/f18fe07f-71a4-447a-a596-edd9aa1b281d-kube-api-access-5srsx\") pod \"f18fe07f-71a4-447a-a596-edd9aa1b281d\" (UID: \"f18fe07f-71a4-447a-a596-edd9aa1b281d\") " Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.617843 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f18fe07f-71a4-447a-a596-edd9aa1b281d-scripts\") pod \"f18fe07f-71a4-447a-a596-edd9aa1b281d\" (UID: \"f18fe07f-71a4-447a-a596-edd9aa1b281d\") " Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.617874 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f18fe07f-71a4-447a-a596-edd9aa1b281d-combined-ca-bundle\") pod \"f18fe07f-71a4-447a-a596-edd9aa1b281d\" (UID: \"f18fe07f-71a4-447a-a596-edd9aa1b281d\") " Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.617931 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f18fe07f-71a4-447a-a596-edd9aa1b281d-sg-core-conf-yaml\") pod \"f18fe07f-71a4-447a-a596-edd9aa1b281d\" (UID: \"f18fe07f-71a4-447a-a596-edd9aa1b281d\") " Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.621006 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f18fe07f-71a4-447a-a596-edd9aa1b281d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f18fe07f-71a4-447a-a596-edd9aa1b281d" (UID: "f18fe07f-71a4-447a-a596-edd9aa1b281d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.625830 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f18fe07f-71a4-447a-a596-edd9aa1b281d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f18fe07f-71a4-447a-a596-edd9aa1b281d" (UID: "f18fe07f-71a4-447a-a596-edd9aa1b281d"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.630185 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f18fe07f-71a4-447a-a596-edd9aa1b281d-kube-api-access-5srsx" (OuterVolumeSpecName: "kube-api-access-5srsx") pod "f18fe07f-71a4-447a-a596-edd9aa1b281d" (UID: "f18fe07f-71a4-447a-a596-edd9aa1b281d"). InnerVolumeSpecName "kube-api-access-5srsx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.644231 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f18fe07f-71a4-447a-a596-edd9aa1b281d-scripts" (OuterVolumeSpecName: "scripts") pod "f18fe07f-71a4-447a-a596-edd9aa1b281d" (UID: "f18fe07f-71a4-447a-a596-edd9aa1b281d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.651221 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f18fe07f-71a4-447a-a596-edd9aa1b281d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f18fe07f-71a4-447a-a596-edd9aa1b281d" (UID: "f18fe07f-71a4-447a-a596-edd9aa1b281d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.720446 4899 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f18fe07f-71a4-447a-a596-edd9aa1b281d-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.720488 4899 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f18fe07f-71a4-447a-a596-edd9aa1b281d-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.720527 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5srsx\" (UniqueName: \"kubernetes.io/projected/f18fe07f-71a4-447a-a596-edd9aa1b281d-kube-api-access-5srsx\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.720543 4899 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f18fe07f-71a4-447a-a596-edd9aa1b281d-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.720554 4899 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f18fe07f-71a4-447a-a596-edd9aa1b281d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.721790 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f18fe07f-71a4-447a-a596-edd9aa1b281d-config-data" (OuterVolumeSpecName: "config-data") pod "f18fe07f-71a4-447a-a596-edd9aa1b281d" (UID: "f18fe07f-71a4-447a-a596-edd9aa1b281d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.722305 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f18fe07f-71a4-447a-a596-edd9aa1b281d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f18fe07f-71a4-447a-a596-edd9aa1b281d" (UID: "f18fe07f-71a4-447a-a596-edd9aa1b281d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.822464 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f18fe07f-71a4-447a-a596-edd9aa1b281d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.822507 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f18fe07f-71a4-447a-a596-edd9aa1b281d-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.894399 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-bvzwv"] Oct 03 08:58:06 crc kubenswrapper[4899]: E1003 08:58:06.894840 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f18fe07f-71a4-447a-a596-edd9aa1b281d" containerName="ceilometer-notification-agent" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.894858 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="f18fe07f-71a4-447a-a596-edd9aa1b281d" containerName="ceilometer-notification-agent" Oct 03 08:58:06 crc kubenswrapper[4899]: E1003 08:58:06.894872 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f18fe07f-71a4-447a-a596-edd9aa1b281d" containerName="ceilometer-central-agent" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.894880 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="f18fe07f-71a4-447a-a596-edd9aa1b281d" containerName="ceilometer-central-agent" Oct 03 08:58:06 crc kubenswrapper[4899]: E1003 08:58:06.894913 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85a158ee-9e94-4664-b795-9fffb1fd2674" containerName="mariadb-account-create" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.894922 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="85a158ee-9e94-4664-b795-9fffb1fd2674" containerName="mariadb-account-create" Oct 03 08:58:06 crc kubenswrapper[4899]: E1003 08:58:06.894934 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f18fe07f-71a4-447a-a596-edd9aa1b281d" containerName="sg-core" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.894941 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="f18fe07f-71a4-447a-a596-edd9aa1b281d" containerName="sg-core" Oct 03 08:58:06 crc kubenswrapper[4899]: E1003 08:58:06.894954 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3f0c5eb-8c4d-42bf-9cb2-5ed03cfa5784" containerName="mariadb-account-create" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.894961 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3f0c5eb-8c4d-42bf-9cb2-5ed03cfa5784" containerName="mariadb-account-create" Oct 03 08:58:06 crc kubenswrapper[4899]: E1003 08:58:06.894991 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f18fe07f-71a4-447a-a596-edd9aa1b281d" containerName="proxy-httpd" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.894998 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="f18fe07f-71a4-447a-a596-edd9aa1b281d" containerName="proxy-httpd" Oct 03 08:58:06 crc kubenswrapper[4899]: E1003 08:58:06.895013 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bd3bb54-4225-4123-b1e2-05bf17011397" containerName="mariadb-account-create" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.895020 4899 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="7bd3bb54-4225-4123-b1e2-05bf17011397" containerName="mariadb-account-create" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.895210 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="f18fe07f-71a4-447a-a596-edd9aa1b281d" containerName="ceilometer-central-agent" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.895235 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="85a158ee-9e94-4664-b795-9fffb1fd2674" containerName="mariadb-account-create" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.895243 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="f18fe07f-71a4-447a-a596-edd9aa1b281d" containerName="sg-core" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.895257 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3f0c5eb-8c4d-42bf-9cb2-5ed03cfa5784" containerName="mariadb-account-create" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.895272 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bd3bb54-4225-4123-b1e2-05bf17011397" containerName="mariadb-account-create" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.895283 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="f18fe07f-71a4-447a-a596-edd9aa1b281d" containerName="ceilometer-notification-agent" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.895290 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="f18fe07f-71a4-447a-a596-edd9aa1b281d" containerName="proxy-httpd" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.896075 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-bvzwv" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.899366 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-2rskq" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.905281 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.905288 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.906341 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-bvzwv"] Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.924770 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52b7bde3-1637-40ef-8b47-f918b97a958d-scripts\") pod \"nova-cell0-conductor-db-sync-bvzwv\" (UID: \"52b7bde3-1637-40ef-8b47-f918b97a958d\") " pod="openstack/nova-cell0-conductor-db-sync-bvzwv" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.924833 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46vbf\" (UniqueName: \"kubernetes.io/projected/52b7bde3-1637-40ef-8b47-f918b97a958d-kube-api-access-46vbf\") pod \"nova-cell0-conductor-db-sync-bvzwv\" (UID: \"52b7bde3-1637-40ef-8b47-f918b97a958d\") " pod="openstack/nova-cell0-conductor-db-sync-bvzwv" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.925175 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52b7bde3-1637-40ef-8b47-f918b97a958d-combined-ca-bundle\") pod 
\"nova-cell0-conductor-db-sync-bvzwv\" (UID: \"52b7bde3-1637-40ef-8b47-f918b97a958d\") " pod="openstack/nova-cell0-conductor-db-sync-bvzwv" Oct 03 08:58:06 crc kubenswrapper[4899]: I1003 08:58:06.925216 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52b7bde3-1637-40ef-8b47-f918b97a958d-config-data\") pod \"nova-cell0-conductor-db-sync-bvzwv\" (UID: \"52b7bde3-1637-40ef-8b47-f918b97a958d\") " pod="openstack/nova-cell0-conductor-db-sync-bvzwv" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.001551 4899 generic.go:334] "Generic (PLEG): container finished" podID="f18fe07f-71a4-447a-a596-edd9aa1b281d" containerID="3d11bc7d73bf6c42fe41f9281afdeea816715b6c24915e1ce0159987413a0162" exitCode=0 Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.001691 4899 generic.go:334] "Generic (PLEG): container finished" podID="f18fe07f-71a4-447a-a596-edd9aa1b281d" containerID="dad7d517c2ac4fc5d9d06252237fb209e3dabac64f2eff636ef2e68b2ccaa567" exitCode=0 Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.001668 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.001642 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f18fe07f-71a4-447a-a596-edd9aa1b281d","Type":"ContainerDied","Data":"3d11bc7d73bf6c42fe41f9281afdeea816715b6c24915e1ce0159987413a0162"} Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.001774 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f18fe07f-71a4-447a-a596-edd9aa1b281d","Type":"ContainerDied","Data":"dad7d517c2ac4fc5d9d06252237fb209e3dabac64f2eff636ef2e68b2ccaa567"} Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.001789 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f18fe07f-71a4-447a-a596-edd9aa1b281d","Type":"ContainerDied","Data":"68881ea014b23ed5a2000a93492fb85ad41b6d7a5a32e617c8382603802aa946"} Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.001804 4899 scope.go:117] "RemoveContainer" containerID="3b9ff524633fb490a541ec9780a4aaff9d9a6f5daa238a18757a7987640f510d" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.020978 4899 scope.go:117] "RemoveContainer" containerID="2f64c437061942d5f568aee839894ef481892a2e1f6f3f5f0cfca3e9cfe36157" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.028815 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52b7bde3-1637-40ef-8b47-f918b97a958d-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-bvzwv\" (UID: \"52b7bde3-1637-40ef-8b47-f918b97a958d\") " pod="openstack/nova-cell0-conductor-db-sync-bvzwv" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.028864 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52b7bde3-1637-40ef-8b47-f918b97a958d-config-data\") pod \"nova-cell0-conductor-db-sync-bvzwv\" (UID: \"52b7bde3-1637-40ef-8b47-f918b97a958d\") " pod="openstack/nova-cell0-conductor-db-sync-bvzwv" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.028932 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52b7bde3-1637-40ef-8b47-f918b97a958d-scripts\") pod 
\"nova-cell0-conductor-db-sync-bvzwv\" (UID: \"52b7bde3-1637-40ef-8b47-f918b97a958d\") " pod="openstack/nova-cell0-conductor-db-sync-bvzwv" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.028966 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46vbf\" (UniqueName: \"kubernetes.io/projected/52b7bde3-1637-40ef-8b47-f918b97a958d-kube-api-access-46vbf\") pod \"nova-cell0-conductor-db-sync-bvzwv\" (UID: \"52b7bde3-1637-40ef-8b47-f918b97a958d\") " pod="openstack/nova-cell0-conductor-db-sync-bvzwv" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.039158 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52b7bde3-1637-40ef-8b47-f918b97a958d-config-data\") pod \"nova-cell0-conductor-db-sync-bvzwv\" (UID: \"52b7bde3-1637-40ef-8b47-f918b97a958d\") " pod="openstack/nova-cell0-conductor-db-sync-bvzwv" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.040113 4899 scope.go:117] "RemoveContainer" containerID="3d11bc7d73bf6c42fe41f9281afdeea816715b6c24915e1ce0159987413a0162" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.042320 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52b7bde3-1637-40ef-8b47-f918b97a958d-scripts\") pod \"nova-cell0-conductor-db-sync-bvzwv\" (UID: \"52b7bde3-1637-40ef-8b47-f918b97a958d\") " pod="openstack/nova-cell0-conductor-db-sync-bvzwv" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.043849 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52b7bde3-1637-40ef-8b47-f918b97a958d-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-bvzwv\" (UID: \"52b7bde3-1637-40ef-8b47-f918b97a958d\") " pod="openstack/nova-cell0-conductor-db-sync-bvzwv" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.054203 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46vbf\" (UniqueName: \"kubernetes.io/projected/52b7bde3-1637-40ef-8b47-f918b97a958d-kube-api-access-46vbf\") pod \"nova-cell0-conductor-db-sync-bvzwv\" (UID: \"52b7bde3-1637-40ef-8b47-f918b97a958d\") " pod="openstack/nova-cell0-conductor-db-sync-bvzwv" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.064475 4899 scope.go:117] "RemoveContainer" containerID="dad7d517c2ac4fc5d9d06252237fb209e3dabac64f2eff636ef2e68b2ccaa567" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.064995 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.074059 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.097925 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.101784 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.104241 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.104527 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.119865 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.130115 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02c3a7a7-f888-457b-917d-7c9325bbf95f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"02c3a7a7-f888-457b-917d-7c9325bbf95f\") " pod="openstack/ceilometer-0" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.130192 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/02c3a7a7-f888-457b-917d-7c9325bbf95f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"02c3a7a7-f888-457b-917d-7c9325bbf95f\") " pod="openstack/ceilometer-0" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.130253 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02c3a7a7-f888-457b-917d-7c9325bbf95f-run-httpd\") pod \"ceilometer-0\" (UID: \"02c3a7a7-f888-457b-917d-7c9325bbf95f\") " pod="openstack/ceilometer-0" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.130310 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02c3a7a7-f888-457b-917d-7c9325bbf95f-scripts\") pod \"ceilometer-0\" (UID: \"02c3a7a7-f888-457b-917d-7c9325bbf95f\") " pod="openstack/ceilometer-0" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.130330 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02c3a7a7-f888-457b-917d-7c9325bbf95f-config-data\") pod \"ceilometer-0\" (UID: \"02c3a7a7-f888-457b-917d-7c9325bbf95f\") " pod="openstack/ceilometer-0" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.130386 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpgmz\" (UniqueName: \"kubernetes.io/projected/02c3a7a7-f888-457b-917d-7c9325bbf95f-kube-api-access-zpgmz\") pod \"ceilometer-0\" (UID: \"02c3a7a7-f888-457b-917d-7c9325bbf95f\") " pod="openstack/ceilometer-0" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.130411 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02c3a7a7-f888-457b-917d-7c9325bbf95f-log-httpd\") pod \"ceilometer-0\" (UID: \"02c3a7a7-f888-457b-917d-7c9325bbf95f\") " pod="openstack/ceilometer-0" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.134370 4899 scope.go:117] "RemoveContainer" containerID="3b9ff524633fb490a541ec9780a4aaff9d9a6f5daa238a18757a7987640f510d" Oct 03 08:58:07 crc kubenswrapper[4899]: E1003 08:58:07.134887 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"3b9ff524633fb490a541ec9780a4aaff9d9a6f5daa238a18757a7987640f510d\": container with ID starting with 3b9ff524633fb490a541ec9780a4aaff9d9a6f5daa238a18757a7987640f510d not found: ID does not exist" containerID="3b9ff524633fb490a541ec9780a4aaff9d9a6f5daa238a18757a7987640f510d" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.134947 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b9ff524633fb490a541ec9780a4aaff9d9a6f5daa238a18757a7987640f510d"} err="failed to get container status \"3b9ff524633fb490a541ec9780a4aaff9d9a6f5daa238a18757a7987640f510d\": rpc error: code = NotFound desc = could not find container \"3b9ff524633fb490a541ec9780a4aaff9d9a6f5daa238a18757a7987640f510d\": container with ID starting with 3b9ff524633fb490a541ec9780a4aaff9d9a6f5daa238a18757a7987640f510d not found: ID does not exist" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.134975 4899 scope.go:117] "RemoveContainer" containerID="2f64c437061942d5f568aee839894ef481892a2e1f6f3f5f0cfca3e9cfe36157" Oct 03 08:58:07 crc kubenswrapper[4899]: E1003 08:58:07.135298 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f64c437061942d5f568aee839894ef481892a2e1f6f3f5f0cfca3e9cfe36157\": container with ID starting with 2f64c437061942d5f568aee839894ef481892a2e1f6f3f5f0cfca3e9cfe36157 not found: ID does not exist" containerID="2f64c437061942d5f568aee839894ef481892a2e1f6f3f5f0cfca3e9cfe36157" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.135333 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f64c437061942d5f568aee839894ef481892a2e1f6f3f5f0cfca3e9cfe36157"} err="failed to get container status \"2f64c437061942d5f568aee839894ef481892a2e1f6f3f5f0cfca3e9cfe36157\": rpc error: code = NotFound desc = could not find container \"2f64c437061942d5f568aee839894ef481892a2e1f6f3f5f0cfca3e9cfe36157\": container with ID starting with 2f64c437061942d5f568aee839894ef481892a2e1f6f3f5f0cfca3e9cfe36157 not found: ID does not exist" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.135359 4899 scope.go:117] "RemoveContainer" containerID="3d11bc7d73bf6c42fe41f9281afdeea816715b6c24915e1ce0159987413a0162" Oct 03 08:58:07 crc kubenswrapper[4899]: E1003 08:58:07.135690 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d11bc7d73bf6c42fe41f9281afdeea816715b6c24915e1ce0159987413a0162\": container with ID starting with 3d11bc7d73bf6c42fe41f9281afdeea816715b6c24915e1ce0159987413a0162 not found: ID does not exist" containerID="3d11bc7d73bf6c42fe41f9281afdeea816715b6c24915e1ce0159987413a0162" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.135730 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d11bc7d73bf6c42fe41f9281afdeea816715b6c24915e1ce0159987413a0162"} err="failed to get container status \"3d11bc7d73bf6c42fe41f9281afdeea816715b6c24915e1ce0159987413a0162\": rpc error: code = NotFound desc = could not find container \"3d11bc7d73bf6c42fe41f9281afdeea816715b6c24915e1ce0159987413a0162\": container with ID starting with 3d11bc7d73bf6c42fe41f9281afdeea816715b6c24915e1ce0159987413a0162 not found: ID does not exist" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.135757 4899 scope.go:117] "RemoveContainer" containerID="dad7d517c2ac4fc5d9d06252237fb209e3dabac64f2eff636ef2e68b2ccaa567" Oct 03 08:58:07 crc 
kubenswrapper[4899]: E1003 08:58:07.136009 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dad7d517c2ac4fc5d9d06252237fb209e3dabac64f2eff636ef2e68b2ccaa567\": container with ID starting with dad7d517c2ac4fc5d9d06252237fb209e3dabac64f2eff636ef2e68b2ccaa567 not found: ID does not exist" containerID="dad7d517c2ac4fc5d9d06252237fb209e3dabac64f2eff636ef2e68b2ccaa567" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.136036 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dad7d517c2ac4fc5d9d06252237fb209e3dabac64f2eff636ef2e68b2ccaa567"} err="failed to get container status \"dad7d517c2ac4fc5d9d06252237fb209e3dabac64f2eff636ef2e68b2ccaa567\": rpc error: code = NotFound desc = could not find container \"dad7d517c2ac4fc5d9d06252237fb209e3dabac64f2eff636ef2e68b2ccaa567\": container with ID starting with dad7d517c2ac4fc5d9d06252237fb209e3dabac64f2eff636ef2e68b2ccaa567 not found: ID does not exist" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.136051 4899 scope.go:117] "RemoveContainer" containerID="3b9ff524633fb490a541ec9780a4aaff9d9a6f5daa238a18757a7987640f510d" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.136295 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b9ff524633fb490a541ec9780a4aaff9d9a6f5daa238a18757a7987640f510d"} err="failed to get container status \"3b9ff524633fb490a541ec9780a4aaff9d9a6f5daa238a18757a7987640f510d\": rpc error: code = NotFound desc = could not find container \"3b9ff524633fb490a541ec9780a4aaff9d9a6f5daa238a18757a7987640f510d\": container with ID starting with 3b9ff524633fb490a541ec9780a4aaff9d9a6f5daa238a18757a7987640f510d not found: ID does not exist" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.136324 4899 scope.go:117] "RemoveContainer" containerID="2f64c437061942d5f568aee839894ef481892a2e1f6f3f5f0cfca3e9cfe36157" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.136561 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f64c437061942d5f568aee839894ef481892a2e1f6f3f5f0cfca3e9cfe36157"} err="failed to get container status \"2f64c437061942d5f568aee839894ef481892a2e1f6f3f5f0cfca3e9cfe36157\": rpc error: code = NotFound desc = could not find container \"2f64c437061942d5f568aee839894ef481892a2e1f6f3f5f0cfca3e9cfe36157\": container with ID starting with 2f64c437061942d5f568aee839894ef481892a2e1f6f3f5f0cfca3e9cfe36157 not found: ID does not exist" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.136582 4899 scope.go:117] "RemoveContainer" containerID="3d11bc7d73bf6c42fe41f9281afdeea816715b6c24915e1ce0159987413a0162" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.136919 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d11bc7d73bf6c42fe41f9281afdeea816715b6c24915e1ce0159987413a0162"} err="failed to get container status \"3d11bc7d73bf6c42fe41f9281afdeea816715b6c24915e1ce0159987413a0162\": rpc error: code = NotFound desc = could not find container \"3d11bc7d73bf6c42fe41f9281afdeea816715b6c24915e1ce0159987413a0162\": container with ID starting with 3d11bc7d73bf6c42fe41f9281afdeea816715b6c24915e1ce0159987413a0162 not found: ID does not exist" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.136971 4899 scope.go:117] "RemoveContainer" containerID="dad7d517c2ac4fc5d9d06252237fb209e3dabac64f2eff636ef2e68b2ccaa567" Oct 03 08:58:07 crc 
kubenswrapper[4899]: I1003 08:58:07.137238 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dad7d517c2ac4fc5d9d06252237fb209e3dabac64f2eff636ef2e68b2ccaa567"} err="failed to get container status \"dad7d517c2ac4fc5d9d06252237fb209e3dabac64f2eff636ef2e68b2ccaa567\": rpc error: code = NotFound desc = could not find container \"dad7d517c2ac4fc5d9d06252237fb209e3dabac64f2eff636ef2e68b2ccaa567\": container with ID starting with dad7d517c2ac4fc5d9d06252237fb209e3dabac64f2eff636ef2e68b2ccaa567 not found: ID does not exist" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.228631 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-bvzwv" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.231250 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpgmz\" (UniqueName: \"kubernetes.io/projected/02c3a7a7-f888-457b-917d-7c9325bbf95f-kube-api-access-zpgmz\") pod \"ceilometer-0\" (UID: \"02c3a7a7-f888-457b-917d-7c9325bbf95f\") " pod="openstack/ceilometer-0" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.231386 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02c3a7a7-f888-457b-917d-7c9325bbf95f-log-httpd\") pod \"ceilometer-0\" (UID: \"02c3a7a7-f888-457b-917d-7c9325bbf95f\") " pod="openstack/ceilometer-0" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.231522 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02c3a7a7-f888-457b-917d-7c9325bbf95f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"02c3a7a7-f888-457b-917d-7c9325bbf95f\") " pod="openstack/ceilometer-0" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.232062 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/02c3a7a7-f888-457b-917d-7c9325bbf95f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"02c3a7a7-f888-457b-917d-7c9325bbf95f\") " pod="openstack/ceilometer-0" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.232159 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02c3a7a7-f888-457b-917d-7c9325bbf95f-run-httpd\") pod \"ceilometer-0\" (UID: \"02c3a7a7-f888-457b-917d-7c9325bbf95f\") " pod="openstack/ceilometer-0" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.232270 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02c3a7a7-f888-457b-917d-7c9325bbf95f-scripts\") pod \"ceilometer-0\" (UID: \"02c3a7a7-f888-457b-917d-7c9325bbf95f\") " pod="openstack/ceilometer-0" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.232356 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02c3a7a7-f888-457b-917d-7c9325bbf95f-config-data\") pod \"ceilometer-0\" (UID: \"02c3a7a7-f888-457b-917d-7c9325bbf95f\") " pod="openstack/ceilometer-0" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.232406 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02c3a7a7-f888-457b-917d-7c9325bbf95f-run-httpd\") pod \"ceilometer-0\" (UID: \"02c3a7a7-f888-457b-917d-7c9325bbf95f\") " 
pod="openstack/ceilometer-0" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.231926 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02c3a7a7-f888-457b-917d-7c9325bbf95f-log-httpd\") pod \"ceilometer-0\" (UID: \"02c3a7a7-f888-457b-917d-7c9325bbf95f\") " pod="openstack/ceilometer-0" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.235451 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/02c3a7a7-f888-457b-917d-7c9325bbf95f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"02c3a7a7-f888-457b-917d-7c9325bbf95f\") " pod="openstack/ceilometer-0" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.235841 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02c3a7a7-f888-457b-917d-7c9325bbf95f-scripts\") pod \"ceilometer-0\" (UID: \"02c3a7a7-f888-457b-917d-7c9325bbf95f\") " pod="openstack/ceilometer-0" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.236491 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02c3a7a7-f888-457b-917d-7c9325bbf95f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"02c3a7a7-f888-457b-917d-7c9325bbf95f\") " pod="openstack/ceilometer-0" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.244245 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02c3a7a7-f888-457b-917d-7c9325bbf95f-config-data\") pod \"ceilometer-0\" (UID: \"02c3a7a7-f888-457b-917d-7c9325bbf95f\") " pod="openstack/ceilometer-0" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.250710 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpgmz\" (UniqueName: \"kubernetes.io/projected/02c3a7a7-f888-457b-917d-7c9325bbf95f-kube-api-access-zpgmz\") pod \"ceilometer-0\" (UID: \"02c3a7a7-f888-457b-917d-7c9325bbf95f\") " pod="openstack/ceilometer-0" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.433951 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.682056 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-bvzwv"] Oct 03 08:58:07 crc kubenswrapper[4899]: I1003 08:58:07.918691 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:58:08 crc kubenswrapper[4899]: I1003 08:58:08.011832 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02c3a7a7-f888-457b-917d-7c9325bbf95f","Type":"ContainerStarted","Data":"7805101d8dbf1df1db809d83fbd4b67105b3966b697751179ab4a64d58fae930"} Oct 03 08:58:08 crc kubenswrapper[4899]: I1003 08:58:08.013070 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-bvzwv" event={"ID":"52b7bde3-1637-40ef-8b47-f918b97a958d","Type":"ContainerStarted","Data":"a8c7c4f1ed0bef2180387f5e0c5cac020e4a45958c3c937880d7fcecc68f9024"} Oct 03 08:58:08 crc kubenswrapper[4899]: I1003 08:58:08.537701 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f18fe07f-71a4-447a-a596-edd9aa1b281d" path="/var/lib/kubelet/pods/f18fe07f-71a4-447a-a596-edd9aa1b281d/volumes" Oct 03 08:58:09 crc kubenswrapper[4899]: I1003 08:58:09.026976 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02c3a7a7-f888-457b-917d-7c9325bbf95f","Type":"ContainerStarted","Data":"55fbc8f951151edf28b22c7e0950137adb41a8b90cbd85e7f9dd0123be035882"} Oct 03 08:58:09 crc kubenswrapper[4899]: I1003 08:58:09.476665 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:58:10 crc kubenswrapper[4899]: I1003 08:58:10.040096 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02c3a7a7-f888-457b-917d-7c9325bbf95f","Type":"ContainerStarted","Data":"9171f5caf105a42cdc52f33085f006f1e8fd737f62e7f8b08e787ad15f9d0f2c"} Oct 03 08:58:11 crc kubenswrapper[4899]: I1003 08:58:11.058082 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02c3a7a7-f888-457b-917d-7c9325bbf95f","Type":"ContainerStarted","Data":"6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605"} Oct 03 08:58:16 crc kubenswrapper[4899]: I1003 08:58:16.117195 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-bvzwv" event={"ID":"52b7bde3-1637-40ef-8b47-f918b97a958d","Type":"ContainerStarted","Data":"eb136b32f264090663bb52dfb6d93da62eba3d29aa93f368ca6883509671f4ce"} Oct 03 08:58:16 crc kubenswrapper[4899]: I1003 08:58:16.121555 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02c3a7a7-f888-457b-917d-7c9325bbf95f","Type":"ContainerStarted","Data":"99fba82b066f3be8b6584aa09551089b559497274224675d4f5ce41ea607f45f"} Oct 03 08:58:16 crc kubenswrapper[4899]: I1003 08:58:16.121745 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="02c3a7a7-f888-457b-917d-7c9325bbf95f" containerName="ceilometer-central-agent" containerID="cri-o://55fbc8f951151edf28b22c7e0950137adb41a8b90cbd85e7f9dd0123be035882" gracePeriod=30 Oct 03 08:58:16 crc kubenswrapper[4899]: I1003 08:58:16.121817 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="02c3a7a7-f888-457b-917d-7c9325bbf95f" containerName="ceilometer-notification-agent" 
containerID="cri-o://9171f5caf105a42cdc52f33085f006f1e8fd737f62e7f8b08e787ad15f9d0f2c" gracePeriod=30 Oct 03 08:58:16 crc kubenswrapper[4899]: I1003 08:58:16.121762 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 03 08:58:16 crc kubenswrapper[4899]: I1003 08:58:16.121802 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="02c3a7a7-f888-457b-917d-7c9325bbf95f" containerName="sg-core" containerID="cri-o://6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605" gracePeriod=30 Oct 03 08:58:16 crc kubenswrapper[4899]: I1003 08:58:16.121777 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="02c3a7a7-f888-457b-917d-7c9325bbf95f" containerName="proxy-httpd" containerID="cri-o://99fba82b066f3be8b6584aa09551089b559497274224675d4f5ce41ea607f45f" gracePeriod=30 Oct 03 08:58:16 crc kubenswrapper[4899]: I1003 08:58:16.133343 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-bvzwv" podStartSLOduration=2.717918309 podStartE2EDuration="10.13332667s" podCreationTimestamp="2025-10-03 08:58:06 +0000 UTC" firstStartedPulling="2025-10-03 08:58:07.691763381 +0000 UTC m=+1061.799248334" lastFinishedPulling="2025-10-03 08:58:15.107171742 +0000 UTC m=+1069.214656695" observedRunningTime="2025-10-03 08:58:16.132255757 +0000 UTC m=+1070.239740710" watchObservedRunningTime="2025-10-03 08:58:16.13332667 +0000 UTC m=+1070.240811623" Oct 03 08:58:16 crc kubenswrapper[4899]: I1003 08:58:16.161564 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.9842987 podStartE2EDuration="9.16154299s" podCreationTimestamp="2025-10-03 08:58:07 +0000 UTC" firstStartedPulling="2025-10-03 08:58:07.926169283 +0000 UTC m=+1062.033654236" lastFinishedPulling="2025-10-03 08:58:15.103413573 +0000 UTC m=+1069.210898526" observedRunningTime="2025-10-03 08:58:16.158722101 +0000 UTC m=+1070.266207054" watchObservedRunningTime="2025-10-03 08:58:16.16154299 +0000 UTC m=+1070.269027943" Oct 03 08:58:16 crc kubenswrapper[4899]: E1003 08:58:16.248533 4899 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod02c3a7a7_f888_457b_917d_7c9325bbf95f.slice/crio-6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605.scope\": RecentStats: unable to find data in memory cache]" Oct 03 08:58:16 crc kubenswrapper[4899]: I1003 08:58:16.846035 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.042424 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02c3a7a7-f888-457b-917d-7c9325bbf95f-log-httpd\") pod \"02c3a7a7-f888-457b-917d-7c9325bbf95f\" (UID: \"02c3a7a7-f888-457b-917d-7c9325bbf95f\") " Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.042466 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02c3a7a7-f888-457b-917d-7c9325bbf95f-config-data\") pod \"02c3a7a7-f888-457b-917d-7c9325bbf95f\" (UID: \"02c3a7a7-f888-457b-917d-7c9325bbf95f\") " Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.042518 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02c3a7a7-f888-457b-917d-7c9325bbf95f-run-httpd\") pod \"02c3a7a7-f888-457b-917d-7c9325bbf95f\" (UID: \"02c3a7a7-f888-457b-917d-7c9325bbf95f\") " Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.042864 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zpgmz\" (UniqueName: \"kubernetes.io/projected/02c3a7a7-f888-457b-917d-7c9325bbf95f-kube-api-access-zpgmz\") pod \"02c3a7a7-f888-457b-917d-7c9325bbf95f\" (UID: \"02c3a7a7-f888-457b-917d-7c9325bbf95f\") " Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.042921 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02c3a7a7-f888-457b-917d-7c9325bbf95f-scripts\") pod \"02c3a7a7-f888-457b-917d-7c9325bbf95f\" (UID: \"02c3a7a7-f888-457b-917d-7c9325bbf95f\") " Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.042978 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/02c3a7a7-f888-457b-917d-7c9325bbf95f-sg-core-conf-yaml\") pod \"02c3a7a7-f888-457b-917d-7c9325bbf95f\" (UID: \"02c3a7a7-f888-457b-917d-7c9325bbf95f\") " Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.043002 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02c3a7a7-f888-457b-917d-7c9325bbf95f-combined-ca-bundle\") pod \"02c3a7a7-f888-457b-917d-7c9325bbf95f\" (UID: \"02c3a7a7-f888-457b-917d-7c9325bbf95f\") " Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.043293 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02c3a7a7-f888-457b-917d-7c9325bbf95f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "02c3a7a7-f888-457b-917d-7c9325bbf95f" (UID: "02c3a7a7-f888-457b-917d-7c9325bbf95f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.043388 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02c3a7a7-f888-457b-917d-7c9325bbf95f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "02c3a7a7-f888-457b-917d-7c9325bbf95f" (UID: "02c3a7a7-f888-457b-917d-7c9325bbf95f"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.043872 4899 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02c3a7a7-f888-457b-917d-7c9325bbf95f-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.043915 4899 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02c3a7a7-f888-457b-917d-7c9325bbf95f-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.048292 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02c3a7a7-f888-457b-917d-7c9325bbf95f-scripts" (OuterVolumeSpecName: "scripts") pod "02c3a7a7-f888-457b-917d-7c9325bbf95f" (UID: "02c3a7a7-f888-457b-917d-7c9325bbf95f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.048534 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02c3a7a7-f888-457b-917d-7c9325bbf95f-kube-api-access-zpgmz" (OuterVolumeSpecName: "kube-api-access-zpgmz") pod "02c3a7a7-f888-457b-917d-7c9325bbf95f" (UID: "02c3a7a7-f888-457b-917d-7c9325bbf95f"). InnerVolumeSpecName "kube-api-access-zpgmz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.070478 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02c3a7a7-f888-457b-917d-7c9325bbf95f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "02c3a7a7-f888-457b-917d-7c9325bbf95f" (UID: "02c3a7a7-f888-457b-917d-7c9325bbf95f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.116997 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02c3a7a7-f888-457b-917d-7c9325bbf95f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "02c3a7a7-f888-457b-917d-7c9325bbf95f" (UID: "02c3a7a7-f888-457b-917d-7c9325bbf95f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.133453 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02c3a7a7-f888-457b-917d-7c9325bbf95f-config-data" (OuterVolumeSpecName: "config-data") pod "02c3a7a7-f888-457b-917d-7c9325bbf95f" (UID: "02c3a7a7-f888-457b-917d-7c9325bbf95f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.142381 4899 generic.go:334] "Generic (PLEG): container finished" podID="02c3a7a7-f888-457b-917d-7c9325bbf95f" containerID="99fba82b066f3be8b6584aa09551089b559497274224675d4f5ce41ea607f45f" exitCode=0 Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.142422 4899 generic.go:334] "Generic (PLEG): container finished" podID="02c3a7a7-f888-457b-917d-7c9325bbf95f" containerID="6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605" exitCode=2 Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.142433 4899 generic.go:334] "Generic (PLEG): container finished" podID="02c3a7a7-f888-457b-917d-7c9325bbf95f" containerID="9171f5caf105a42cdc52f33085f006f1e8fd737f62e7f8b08e787ad15f9d0f2c" exitCode=0 Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.142440 4899 generic.go:334] "Generic (PLEG): container finished" podID="02c3a7a7-f888-457b-917d-7c9325bbf95f" containerID="55fbc8f951151edf28b22c7e0950137adb41a8b90cbd85e7f9dd0123be035882" exitCode=0 Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.142451 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.142473 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02c3a7a7-f888-457b-917d-7c9325bbf95f","Type":"ContainerDied","Data":"99fba82b066f3be8b6584aa09551089b559497274224675d4f5ce41ea607f45f"} Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.142539 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02c3a7a7-f888-457b-917d-7c9325bbf95f","Type":"ContainerDied","Data":"6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605"} Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.142555 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02c3a7a7-f888-457b-917d-7c9325bbf95f","Type":"ContainerDied","Data":"9171f5caf105a42cdc52f33085f006f1e8fd737f62e7f8b08e787ad15f9d0f2c"} Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.142573 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02c3a7a7-f888-457b-917d-7c9325bbf95f","Type":"ContainerDied","Data":"55fbc8f951151edf28b22c7e0950137adb41a8b90cbd85e7f9dd0123be035882"} Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.142586 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02c3a7a7-f888-457b-917d-7c9325bbf95f","Type":"ContainerDied","Data":"7805101d8dbf1df1db809d83fbd4b67105b3966b697751179ab4a64d58fae930"} Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.142611 4899 scope.go:117] "RemoveContainer" containerID="99fba82b066f3be8b6584aa09551089b559497274224675d4f5ce41ea607f45f" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.145318 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zpgmz\" (UniqueName: \"kubernetes.io/projected/02c3a7a7-f888-457b-917d-7c9325bbf95f-kube-api-access-zpgmz\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.145347 4899 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02c3a7a7-f888-457b-917d-7c9325bbf95f-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.145359 4899 reconciler_common.go:293] "Volume detached 
for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/02c3a7a7-f888-457b-917d-7c9325bbf95f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.145370 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02c3a7a7-f888-457b-917d-7c9325bbf95f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.145380 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02c3a7a7-f888-457b-917d-7c9325bbf95f-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.161519 4899 scope.go:117] "RemoveContainer" containerID="6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.179450 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.208327 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.214197 4899 scope.go:117] "RemoveContainer" containerID="9171f5caf105a42cdc52f33085f006f1e8fd737f62e7f8b08e787ad15f9d0f2c" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.225870 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:58:17 crc kubenswrapper[4899]: E1003 08:58:17.226367 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02c3a7a7-f888-457b-917d-7c9325bbf95f" containerName="proxy-httpd" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.226394 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="02c3a7a7-f888-457b-917d-7c9325bbf95f" containerName="proxy-httpd" Oct 03 08:58:17 crc kubenswrapper[4899]: E1003 08:58:17.226412 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02c3a7a7-f888-457b-917d-7c9325bbf95f" containerName="ceilometer-notification-agent" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.226421 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="02c3a7a7-f888-457b-917d-7c9325bbf95f" containerName="ceilometer-notification-agent" Oct 03 08:58:17 crc kubenswrapper[4899]: E1003 08:58:17.226436 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02c3a7a7-f888-457b-917d-7c9325bbf95f" containerName="ceilometer-central-agent" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.226444 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="02c3a7a7-f888-457b-917d-7c9325bbf95f" containerName="ceilometer-central-agent" Oct 03 08:58:17 crc kubenswrapper[4899]: E1003 08:58:17.226493 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02c3a7a7-f888-457b-917d-7c9325bbf95f" containerName="sg-core" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.226501 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="02c3a7a7-f888-457b-917d-7c9325bbf95f" containerName="sg-core" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.226711 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="02c3a7a7-f888-457b-917d-7c9325bbf95f" containerName="ceilometer-central-agent" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.226764 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="02c3a7a7-f888-457b-917d-7c9325bbf95f" containerName="ceilometer-notification-agent" Oct 03 08:58:17 crc 
kubenswrapper[4899]: I1003 08:58:17.226786 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="02c3a7a7-f888-457b-917d-7c9325bbf95f" containerName="proxy-httpd" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.226802 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="02c3a7a7-f888-457b-917d-7c9325bbf95f" containerName="sg-core" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.229269 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.232240 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.232697 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.235002 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.248777 4899 scope.go:117] "RemoveContainer" containerID="55fbc8f951151edf28b22c7e0950137adb41a8b90cbd85e7f9dd0123be035882" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.332193 4899 scope.go:117] "RemoveContainer" containerID="99fba82b066f3be8b6584aa09551089b559497274224675d4f5ce41ea607f45f" Oct 03 08:58:17 crc kubenswrapper[4899]: E1003 08:58:17.332660 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99fba82b066f3be8b6584aa09551089b559497274224675d4f5ce41ea607f45f\": container with ID starting with 99fba82b066f3be8b6584aa09551089b559497274224675d4f5ce41ea607f45f not found: ID does not exist" containerID="99fba82b066f3be8b6584aa09551089b559497274224675d4f5ce41ea607f45f" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.332693 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99fba82b066f3be8b6584aa09551089b559497274224675d4f5ce41ea607f45f"} err="failed to get container status \"99fba82b066f3be8b6584aa09551089b559497274224675d4f5ce41ea607f45f\": rpc error: code = NotFound desc = could not find container \"99fba82b066f3be8b6584aa09551089b559497274224675d4f5ce41ea607f45f\": container with ID starting with 99fba82b066f3be8b6584aa09551089b559497274224675d4f5ce41ea607f45f not found: ID does not exist" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.332717 4899 scope.go:117] "RemoveContainer" containerID="6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605" Oct 03 08:58:17 crc kubenswrapper[4899]: E1003 08:58:17.332990 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605\": container with ID starting with 6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605 not found: ID does not exist" containerID="6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.333023 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605"} err="failed to get container status \"6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605\": rpc error: code = NotFound desc = could not find container \"6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605\": 
container with ID starting with 6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605 not found: ID does not exist" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.333039 4899 scope.go:117] "RemoveContainer" containerID="9171f5caf105a42cdc52f33085f006f1e8fd737f62e7f8b08e787ad15f9d0f2c" Oct 03 08:58:17 crc kubenswrapper[4899]: E1003 08:58:17.333312 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9171f5caf105a42cdc52f33085f006f1e8fd737f62e7f8b08e787ad15f9d0f2c\": container with ID starting with 9171f5caf105a42cdc52f33085f006f1e8fd737f62e7f8b08e787ad15f9d0f2c not found: ID does not exist" containerID="9171f5caf105a42cdc52f33085f006f1e8fd737f62e7f8b08e787ad15f9d0f2c" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.333336 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9171f5caf105a42cdc52f33085f006f1e8fd737f62e7f8b08e787ad15f9d0f2c"} err="failed to get container status \"9171f5caf105a42cdc52f33085f006f1e8fd737f62e7f8b08e787ad15f9d0f2c\": rpc error: code = NotFound desc = could not find container \"9171f5caf105a42cdc52f33085f006f1e8fd737f62e7f8b08e787ad15f9d0f2c\": container with ID starting with 9171f5caf105a42cdc52f33085f006f1e8fd737f62e7f8b08e787ad15f9d0f2c not found: ID does not exist" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.333351 4899 scope.go:117] "RemoveContainer" containerID="55fbc8f951151edf28b22c7e0950137adb41a8b90cbd85e7f9dd0123be035882" Oct 03 08:58:17 crc kubenswrapper[4899]: E1003 08:58:17.333618 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55fbc8f951151edf28b22c7e0950137adb41a8b90cbd85e7f9dd0123be035882\": container with ID starting with 55fbc8f951151edf28b22c7e0950137adb41a8b90cbd85e7f9dd0123be035882 not found: ID does not exist" containerID="55fbc8f951151edf28b22c7e0950137adb41a8b90cbd85e7f9dd0123be035882" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.333665 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55fbc8f951151edf28b22c7e0950137adb41a8b90cbd85e7f9dd0123be035882"} err="failed to get container status \"55fbc8f951151edf28b22c7e0950137adb41a8b90cbd85e7f9dd0123be035882\": rpc error: code = NotFound desc = could not find container \"55fbc8f951151edf28b22c7e0950137adb41a8b90cbd85e7f9dd0123be035882\": container with ID starting with 55fbc8f951151edf28b22c7e0950137adb41a8b90cbd85e7f9dd0123be035882 not found: ID does not exist" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.333693 4899 scope.go:117] "RemoveContainer" containerID="99fba82b066f3be8b6584aa09551089b559497274224675d4f5ce41ea607f45f" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.333959 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99fba82b066f3be8b6584aa09551089b559497274224675d4f5ce41ea607f45f"} err="failed to get container status \"99fba82b066f3be8b6584aa09551089b559497274224675d4f5ce41ea607f45f\": rpc error: code = NotFound desc = could not find container \"99fba82b066f3be8b6584aa09551089b559497274224675d4f5ce41ea607f45f\": container with ID starting with 99fba82b066f3be8b6584aa09551089b559497274224675d4f5ce41ea607f45f not found: ID does not exist" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.333979 4899 scope.go:117] "RemoveContainer" containerID="6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605" 
Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.334205 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605"} err="failed to get container status \"6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605\": rpc error: code = NotFound desc = could not find container \"6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605\": container with ID starting with 6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605 not found: ID does not exist" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.334225 4899 scope.go:117] "RemoveContainer" containerID="9171f5caf105a42cdc52f33085f006f1e8fd737f62e7f8b08e787ad15f9d0f2c" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.334503 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9171f5caf105a42cdc52f33085f006f1e8fd737f62e7f8b08e787ad15f9d0f2c"} err="failed to get container status \"9171f5caf105a42cdc52f33085f006f1e8fd737f62e7f8b08e787ad15f9d0f2c\": rpc error: code = NotFound desc = could not find container \"9171f5caf105a42cdc52f33085f006f1e8fd737f62e7f8b08e787ad15f9d0f2c\": container with ID starting with 9171f5caf105a42cdc52f33085f006f1e8fd737f62e7f8b08e787ad15f9d0f2c not found: ID does not exist" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.334525 4899 scope.go:117] "RemoveContainer" containerID="55fbc8f951151edf28b22c7e0950137adb41a8b90cbd85e7f9dd0123be035882" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.334737 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55fbc8f951151edf28b22c7e0950137adb41a8b90cbd85e7f9dd0123be035882"} err="failed to get container status \"55fbc8f951151edf28b22c7e0950137adb41a8b90cbd85e7f9dd0123be035882\": rpc error: code = NotFound desc = could not find container \"55fbc8f951151edf28b22c7e0950137adb41a8b90cbd85e7f9dd0123be035882\": container with ID starting with 55fbc8f951151edf28b22c7e0950137adb41a8b90cbd85e7f9dd0123be035882 not found: ID does not exist" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.334839 4899 scope.go:117] "RemoveContainer" containerID="99fba82b066f3be8b6584aa09551089b559497274224675d4f5ce41ea607f45f" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.335131 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99fba82b066f3be8b6584aa09551089b559497274224675d4f5ce41ea607f45f"} err="failed to get container status \"99fba82b066f3be8b6584aa09551089b559497274224675d4f5ce41ea607f45f\": rpc error: code = NotFound desc = could not find container \"99fba82b066f3be8b6584aa09551089b559497274224675d4f5ce41ea607f45f\": container with ID starting with 99fba82b066f3be8b6584aa09551089b559497274224675d4f5ce41ea607f45f not found: ID does not exist" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.335150 4899 scope.go:117] "RemoveContainer" containerID="6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.335346 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605"} err="failed to get container status \"6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605\": rpc error: code = NotFound desc = could not find container \"6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605\": 
container with ID starting with 6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605 not found: ID does not exist" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.335451 4899 scope.go:117] "RemoveContainer" containerID="9171f5caf105a42cdc52f33085f006f1e8fd737f62e7f8b08e787ad15f9d0f2c" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.336053 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9171f5caf105a42cdc52f33085f006f1e8fd737f62e7f8b08e787ad15f9d0f2c"} err="failed to get container status \"9171f5caf105a42cdc52f33085f006f1e8fd737f62e7f8b08e787ad15f9d0f2c\": rpc error: code = NotFound desc = could not find container \"9171f5caf105a42cdc52f33085f006f1e8fd737f62e7f8b08e787ad15f9d0f2c\": container with ID starting with 9171f5caf105a42cdc52f33085f006f1e8fd737f62e7f8b08e787ad15f9d0f2c not found: ID does not exist" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.336074 4899 scope.go:117] "RemoveContainer" containerID="55fbc8f951151edf28b22c7e0950137adb41a8b90cbd85e7f9dd0123be035882" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.336276 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55fbc8f951151edf28b22c7e0950137adb41a8b90cbd85e7f9dd0123be035882"} err="failed to get container status \"55fbc8f951151edf28b22c7e0950137adb41a8b90cbd85e7f9dd0123be035882\": rpc error: code = NotFound desc = could not find container \"55fbc8f951151edf28b22c7e0950137adb41a8b90cbd85e7f9dd0123be035882\": container with ID starting with 55fbc8f951151edf28b22c7e0950137adb41a8b90cbd85e7f9dd0123be035882 not found: ID does not exist" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.336295 4899 scope.go:117] "RemoveContainer" containerID="99fba82b066f3be8b6584aa09551089b559497274224675d4f5ce41ea607f45f" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.336641 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99fba82b066f3be8b6584aa09551089b559497274224675d4f5ce41ea607f45f"} err="failed to get container status \"99fba82b066f3be8b6584aa09551089b559497274224675d4f5ce41ea607f45f\": rpc error: code = NotFound desc = could not find container \"99fba82b066f3be8b6584aa09551089b559497274224675d4f5ce41ea607f45f\": container with ID starting with 99fba82b066f3be8b6584aa09551089b559497274224675d4f5ce41ea607f45f not found: ID does not exist" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.336703 4899 scope.go:117] "RemoveContainer" containerID="6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.336971 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605"} err="failed to get container status \"6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605\": rpc error: code = NotFound desc = could not find container \"6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605\": container with ID starting with 6af4f6123b3fad8bf00327ebd024d0ec0636bc9086be0bc15c1648bc89d5f605 not found: ID does not exist" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.337080 4899 scope.go:117] "RemoveContainer" containerID="9171f5caf105a42cdc52f33085f006f1e8fd737f62e7f8b08e787ad15f9d0f2c" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.337389 4899 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"9171f5caf105a42cdc52f33085f006f1e8fd737f62e7f8b08e787ad15f9d0f2c"} err="failed to get container status \"9171f5caf105a42cdc52f33085f006f1e8fd737f62e7f8b08e787ad15f9d0f2c\": rpc error: code = NotFound desc = could not find container \"9171f5caf105a42cdc52f33085f006f1e8fd737f62e7f8b08e787ad15f9d0f2c\": container with ID starting with 9171f5caf105a42cdc52f33085f006f1e8fd737f62e7f8b08e787ad15f9d0f2c not found: ID does not exist" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.337406 4899 scope.go:117] "RemoveContainer" containerID="55fbc8f951151edf28b22c7e0950137adb41a8b90cbd85e7f9dd0123be035882" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.337630 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55fbc8f951151edf28b22c7e0950137adb41a8b90cbd85e7f9dd0123be035882"} err="failed to get container status \"55fbc8f951151edf28b22c7e0950137adb41a8b90cbd85e7f9dd0123be035882\": rpc error: code = NotFound desc = could not find container \"55fbc8f951151edf28b22c7e0950137adb41a8b90cbd85e7f9dd0123be035882\": container with ID starting with 55fbc8f951151edf28b22c7e0950137adb41a8b90cbd85e7f9dd0123be035882 not found: ID does not exist" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.349150 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/833e2f10-2b54-4a61-a4ff-6e295668bca9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"833e2f10-2b54-4a61-a4ff-6e295668bca9\") " pod="openstack/ceilometer-0" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.349216 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/833e2f10-2b54-4a61-a4ff-6e295668bca9-run-httpd\") pod \"ceilometer-0\" (UID: \"833e2f10-2b54-4a61-a4ff-6e295668bca9\") " pod="openstack/ceilometer-0" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.349298 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/833e2f10-2b54-4a61-a4ff-6e295668bca9-scripts\") pod \"ceilometer-0\" (UID: \"833e2f10-2b54-4a61-a4ff-6e295668bca9\") " pod="openstack/ceilometer-0" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.349371 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/833e2f10-2b54-4a61-a4ff-6e295668bca9-config-data\") pod \"ceilometer-0\" (UID: \"833e2f10-2b54-4a61-a4ff-6e295668bca9\") " pod="openstack/ceilometer-0" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.349415 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/833e2f10-2b54-4a61-a4ff-6e295668bca9-log-httpd\") pod \"ceilometer-0\" (UID: \"833e2f10-2b54-4a61-a4ff-6e295668bca9\") " pod="openstack/ceilometer-0" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.349449 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcjzk\" (UniqueName: \"kubernetes.io/projected/833e2f10-2b54-4a61-a4ff-6e295668bca9-kube-api-access-tcjzk\") pod \"ceilometer-0\" (UID: \"833e2f10-2b54-4a61-a4ff-6e295668bca9\") " pod="openstack/ceilometer-0" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.349478 4899 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/833e2f10-2b54-4a61-a4ff-6e295668bca9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"833e2f10-2b54-4a61-a4ff-6e295668bca9\") " pod="openstack/ceilometer-0" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.451564 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/833e2f10-2b54-4a61-a4ff-6e295668bca9-scripts\") pod \"ceilometer-0\" (UID: \"833e2f10-2b54-4a61-a4ff-6e295668bca9\") " pod="openstack/ceilometer-0" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.451688 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/833e2f10-2b54-4a61-a4ff-6e295668bca9-config-data\") pod \"ceilometer-0\" (UID: \"833e2f10-2b54-4a61-a4ff-6e295668bca9\") " pod="openstack/ceilometer-0" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.452111 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/833e2f10-2b54-4a61-a4ff-6e295668bca9-log-httpd\") pod \"ceilometer-0\" (UID: \"833e2f10-2b54-4a61-a4ff-6e295668bca9\") " pod="openstack/ceilometer-0" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.452169 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcjzk\" (UniqueName: \"kubernetes.io/projected/833e2f10-2b54-4a61-a4ff-6e295668bca9-kube-api-access-tcjzk\") pod \"ceilometer-0\" (UID: \"833e2f10-2b54-4a61-a4ff-6e295668bca9\") " pod="openstack/ceilometer-0" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.452209 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/833e2f10-2b54-4a61-a4ff-6e295668bca9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"833e2f10-2b54-4a61-a4ff-6e295668bca9\") " pod="openstack/ceilometer-0" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.452386 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/833e2f10-2b54-4a61-a4ff-6e295668bca9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"833e2f10-2b54-4a61-a4ff-6e295668bca9\") " pod="openstack/ceilometer-0" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.452422 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/833e2f10-2b54-4a61-a4ff-6e295668bca9-run-httpd\") pod \"ceilometer-0\" (UID: \"833e2f10-2b54-4a61-a4ff-6e295668bca9\") " pod="openstack/ceilometer-0" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.452667 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/833e2f10-2b54-4a61-a4ff-6e295668bca9-log-httpd\") pod \"ceilometer-0\" (UID: \"833e2f10-2b54-4a61-a4ff-6e295668bca9\") " pod="openstack/ceilometer-0" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.452862 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/833e2f10-2b54-4a61-a4ff-6e295668bca9-run-httpd\") pod \"ceilometer-0\" (UID: \"833e2f10-2b54-4a61-a4ff-6e295668bca9\") " pod="openstack/ceilometer-0" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.457073 4899 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/833e2f10-2b54-4a61-a4ff-6e295668bca9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"833e2f10-2b54-4a61-a4ff-6e295668bca9\") " pod="openstack/ceilometer-0" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.457428 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/833e2f10-2b54-4a61-a4ff-6e295668bca9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"833e2f10-2b54-4a61-a4ff-6e295668bca9\") " pod="openstack/ceilometer-0" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.458305 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/833e2f10-2b54-4a61-a4ff-6e295668bca9-config-data\") pod \"ceilometer-0\" (UID: \"833e2f10-2b54-4a61-a4ff-6e295668bca9\") " pod="openstack/ceilometer-0" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.458869 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/833e2f10-2b54-4a61-a4ff-6e295668bca9-scripts\") pod \"ceilometer-0\" (UID: \"833e2f10-2b54-4a61-a4ff-6e295668bca9\") " pod="openstack/ceilometer-0" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.481852 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcjzk\" (UniqueName: \"kubernetes.io/projected/833e2f10-2b54-4a61-a4ff-6e295668bca9-kube-api-access-tcjzk\") pod \"ceilometer-0\" (UID: \"833e2f10-2b54-4a61-a4ff-6e295668bca9\") " pod="openstack/ceilometer-0" Oct 03 08:58:17 crc kubenswrapper[4899]: I1003 08:58:17.635036 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:58:18 crc kubenswrapper[4899]: I1003 08:58:18.091027 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:58:18 crc kubenswrapper[4899]: W1003 08:58:18.100577 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod833e2f10_2b54_4a61_a4ff_6e295668bca9.slice/crio-d1bb7ba3b350b36e9a3623279cb7d5a1a44fc43b9cd3c3403e206e89058c8255 WatchSource:0}: Error finding container d1bb7ba3b350b36e9a3623279cb7d5a1a44fc43b9cd3c3403e206e89058c8255: Status 404 returned error can't find the container with id d1bb7ba3b350b36e9a3623279cb7d5a1a44fc43b9cd3c3403e206e89058c8255 Oct 03 08:58:18 crc kubenswrapper[4899]: I1003 08:58:18.154449 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"833e2f10-2b54-4a61-a4ff-6e295668bca9","Type":"ContainerStarted","Data":"d1bb7ba3b350b36e9a3623279cb7d5a1a44fc43b9cd3c3403e206e89058c8255"} Oct 03 08:58:18 crc kubenswrapper[4899]: I1003 08:58:18.536638 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02c3a7a7-f888-457b-917d-7c9325bbf95f" path="/var/lib/kubelet/pods/02c3a7a7-f888-457b-917d-7c9325bbf95f/volumes" Oct 03 08:58:20 crc kubenswrapper[4899]: I1003 08:58:20.171591 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"833e2f10-2b54-4a61-a4ff-6e295668bca9","Type":"ContainerStarted","Data":"6001d54af0e93a4e9dcd5815009d06519c4ea2e1817dc5bc5ffc057d4a40d01b"} Oct 03 08:58:21 crc kubenswrapper[4899]: I1003 08:58:21.189778 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"833e2f10-2b54-4a61-a4ff-6e295668bca9","Type":"ContainerStarted","Data":"fb7c18b6e87c4efb15351aff3c6e54a4e8c1d9f3997d89ffe943ba314389da83"} Oct 03 08:58:22 crc kubenswrapper[4899]: I1003 08:58:22.208151 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"833e2f10-2b54-4a61-a4ff-6e295668bca9","Type":"ContainerStarted","Data":"47c74d771242b8f8264859afbb6e6db38ef03dcaf80955908d72372df7f3d0d1"} Oct 03 08:58:23 crc kubenswrapper[4899]: I1003 08:58:23.218221 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"833e2f10-2b54-4a61-a4ff-6e295668bca9","Type":"ContainerStarted","Data":"f5a8aaa3c7949b91ff3740ff91eb325a0e3d9c6ee500f3e2f81e939589ffa6c5"} Oct 03 08:58:23 crc kubenswrapper[4899]: I1003 08:58:23.218679 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 03 08:58:23 crc kubenswrapper[4899]: I1003 08:58:23.240014 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.603957631 podStartE2EDuration="6.239994755s" podCreationTimestamp="2025-10-03 08:58:17 +0000 UTC" firstStartedPulling="2025-10-03 08:58:18.104107648 +0000 UTC m=+1072.211592601" lastFinishedPulling="2025-10-03 08:58:22.740144772 +0000 UTC m=+1076.847629725" observedRunningTime="2025-10-03 08:58:23.237378842 +0000 UTC m=+1077.344863795" watchObservedRunningTime="2025-10-03 08:58:23.239994755 +0000 UTC m=+1077.347479708" Oct 03 08:58:25 crc kubenswrapper[4899]: I1003 08:58:25.236198 4899 generic.go:334] "Generic (PLEG): container finished" podID="52b7bde3-1637-40ef-8b47-f918b97a958d" containerID="eb136b32f264090663bb52dfb6d93da62eba3d29aa93f368ca6883509671f4ce" exitCode=0 Oct 03 08:58:25 crc 
kubenswrapper[4899]: I1003 08:58:25.236225 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-bvzwv" event={"ID":"52b7bde3-1637-40ef-8b47-f918b97a958d","Type":"ContainerDied","Data":"eb136b32f264090663bb52dfb6d93da62eba3d29aa93f368ca6883509671f4ce"} Oct 03 08:58:26 crc kubenswrapper[4899]: I1003 08:58:26.576658 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-bvzwv" Oct 03 08:58:26 crc kubenswrapper[4899]: I1003 08:58:26.710260 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52b7bde3-1637-40ef-8b47-f918b97a958d-config-data\") pod \"52b7bde3-1637-40ef-8b47-f918b97a958d\" (UID: \"52b7bde3-1637-40ef-8b47-f918b97a958d\") " Oct 03 08:58:26 crc kubenswrapper[4899]: I1003 08:58:26.710296 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52b7bde3-1637-40ef-8b47-f918b97a958d-scripts\") pod \"52b7bde3-1637-40ef-8b47-f918b97a958d\" (UID: \"52b7bde3-1637-40ef-8b47-f918b97a958d\") " Oct 03 08:58:26 crc kubenswrapper[4899]: I1003 08:58:26.710440 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46vbf\" (UniqueName: \"kubernetes.io/projected/52b7bde3-1637-40ef-8b47-f918b97a958d-kube-api-access-46vbf\") pod \"52b7bde3-1637-40ef-8b47-f918b97a958d\" (UID: \"52b7bde3-1637-40ef-8b47-f918b97a958d\") " Oct 03 08:58:26 crc kubenswrapper[4899]: I1003 08:58:26.710480 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52b7bde3-1637-40ef-8b47-f918b97a958d-combined-ca-bundle\") pod \"52b7bde3-1637-40ef-8b47-f918b97a958d\" (UID: \"52b7bde3-1637-40ef-8b47-f918b97a958d\") " Oct 03 08:58:26 crc kubenswrapper[4899]: I1003 08:58:26.716477 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52b7bde3-1637-40ef-8b47-f918b97a958d-scripts" (OuterVolumeSpecName: "scripts") pod "52b7bde3-1637-40ef-8b47-f918b97a958d" (UID: "52b7bde3-1637-40ef-8b47-f918b97a958d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:58:26 crc kubenswrapper[4899]: I1003 08:58:26.716764 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52b7bde3-1637-40ef-8b47-f918b97a958d-kube-api-access-46vbf" (OuterVolumeSpecName: "kube-api-access-46vbf") pod "52b7bde3-1637-40ef-8b47-f918b97a958d" (UID: "52b7bde3-1637-40ef-8b47-f918b97a958d"). InnerVolumeSpecName "kube-api-access-46vbf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:58:26 crc kubenswrapper[4899]: I1003 08:58:26.741159 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52b7bde3-1637-40ef-8b47-f918b97a958d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "52b7bde3-1637-40ef-8b47-f918b97a958d" (UID: "52b7bde3-1637-40ef-8b47-f918b97a958d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:58:26 crc kubenswrapper[4899]: I1003 08:58:26.744718 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52b7bde3-1637-40ef-8b47-f918b97a958d-config-data" (OuterVolumeSpecName: "config-data") pod "52b7bde3-1637-40ef-8b47-f918b97a958d" (UID: "52b7bde3-1637-40ef-8b47-f918b97a958d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:58:26 crc kubenswrapper[4899]: I1003 08:58:26.813401 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46vbf\" (UniqueName: \"kubernetes.io/projected/52b7bde3-1637-40ef-8b47-f918b97a958d-kube-api-access-46vbf\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:26 crc kubenswrapper[4899]: I1003 08:58:26.813577 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52b7bde3-1637-40ef-8b47-f918b97a958d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:26 crc kubenswrapper[4899]: I1003 08:58:26.813664 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52b7bde3-1637-40ef-8b47-f918b97a958d-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:26 crc kubenswrapper[4899]: I1003 08:58:26.813727 4899 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52b7bde3-1637-40ef-8b47-f918b97a958d-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:27 crc kubenswrapper[4899]: I1003 08:58:27.254705 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-bvzwv" event={"ID":"52b7bde3-1637-40ef-8b47-f918b97a958d","Type":"ContainerDied","Data":"a8c7c4f1ed0bef2180387f5e0c5cac020e4a45958c3c937880d7fcecc68f9024"} Oct 03 08:58:27 crc kubenswrapper[4899]: I1003 08:58:27.255527 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a8c7c4f1ed0bef2180387f5e0c5cac020e4a45958c3c937880d7fcecc68f9024" Oct 03 08:58:27 crc kubenswrapper[4899]: I1003 08:58:27.255021 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-bvzwv" Oct 03 08:58:27 crc kubenswrapper[4899]: I1003 08:58:27.341409 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 03 08:58:27 crc kubenswrapper[4899]: E1003 08:58:27.341907 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52b7bde3-1637-40ef-8b47-f918b97a958d" containerName="nova-cell0-conductor-db-sync" Oct 03 08:58:27 crc kubenswrapper[4899]: I1003 08:58:27.341927 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="52b7bde3-1637-40ef-8b47-f918b97a958d" containerName="nova-cell0-conductor-db-sync" Oct 03 08:58:27 crc kubenswrapper[4899]: I1003 08:58:27.342188 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="52b7bde3-1637-40ef-8b47-f918b97a958d" containerName="nova-cell0-conductor-db-sync" Oct 03 08:58:27 crc kubenswrapper[4899]: I1003 08:58:27.343122 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 03 08:58:27 crc kubenswrapper[4899]: I1003 08:58:27.345558 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-2rskq" Oct 03 08:58:27 crc kubenswrapper[4899]: I1003 08:58:27.352733 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 03 08:58:27 crc kubenswrapper[4899]: I1003 08:58:27.360144 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 03 08:58:27 crc kubenswrapper[4899]: I1003 08:58:27.525754 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frd96\" (UniqueName: \"kubernetes.io/projected/4a91feca-8e5c-489a-bd2b-222f17e9b6d6-kube-api-access-frd96\") pod \"nova-cell0-conductor-0\" (UID: \"4a91feca-8e5c-489a-bd2b-222f17e9b6d6\") " pod="openstack/nova-cell0-conductor-0" Oct 03 08:58:27 crc kubenswrapper[4899]: I1003 08:58:27.525935 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a91feca-8e5c-489a-bd2b-222f17e9b6d6-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"4a91feca-8e5c-489a-bd2b-222f17e9b6d6\") " pod="openstack/nova-cell0-conductor-0" Oct 03 08:58:27 crc kubenswrapper[4899]: I1003 08:58:27.525973 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a91feca-8e5c-489a-bd2b-222f17e9b6d6-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"4a91feca-8e5c-489a-bd2b-222f17e9b6d6\") " pod="openstack/nova-cell0-conductor-0" Oct 03 08:58:27 crc kubenswrapper[4899]: I1003 08:58:27.627670 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a91feca-8e5c-489a-bd2b-222f17e9b6d6-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"4a91feca-8e5c-489a-bd2b-222f17e9b6d6\") " pod="openstack/nova-cell0-conductor-0" Oct 03 08:58:27 crc kubenswrapper[4899]: I1003 08:58:27.627731 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a91feca-8e5c-489a-bd2b-222f17e9b6d6-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"4a91feca-8e5c-489a-bd2b-222f17e9b6d6\") " pod="openstack/nova-cell0-conductor-0" Oct 03 08:58:27 crc kubenswrapper[4899]: I1003 08:58:27.627850 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frd96\" (UniqueName: \"kubernetes.io/projected/4a91feca-8e5c-489a-bd2b-222f17e9b6d6-kube-api-access-frd96\") pod \"nova-cell0-conductor-0\" (UID: \"4a91feca-8e5c-489a-bd2b-222f17e9b6d6\") " pod="openstack/nova-cell0-conductor-0" Oct 03 08:58:27 crc kubenswrapper[4899]: I1003 08:58:27.632508 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a91feca-8e5c-489a-bd2b-222f17e9b6d6-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"4a91feca-8e5c-489a-bd2b-222f17e9b6d6\") " pod="openstack/nova-cell0-conductor-0" Oct 03 08:58:27 crc kubenswrapper[4899]: I1003 08:58:27.632855 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a91feca-8e5c-489a-bd2b-222f17e9b6d6-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" 
(UID: \"4a91feca-8e5c-489a-bd2b-222f17e9b6d6\") " pod="openstack/nova-cell0-conductor-0" Oct 03 08:58:27 crc kubenswrapper[4899]: I1003 08:58:27.646175 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frd96\" (UniqueName: \"kubernetes.io/projected/4a91feca-8e5c-489a-bd2b-222f17e9b6d6-kube-api-access-frd96\") pod \"nova-cell0-conductor-0\" (UID: \"4a91feca-8e5c-489a-bd2b-222f17e9b6d6\") " pod="openstack/nova-cell0-conductor-0" Oct 03 08:58:27 crc kubenswrapper[4899]: I1003 08:58:27.662323 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 03 08:58:28 crc kubenswrapper[4899]: I1003 08:58:28.129688 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 03 08:58:28 crc kubenswrapper[4899]: W1003 08:58:28.131913 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4a91feca_8e5c_489a_bd2b_222f17e9b6d6.slice/crio-e1059438e8ea3022d4cf33d65fd54c35a5ce271e081726fb9f98f4c0fec70bdb WatchSource:0}: Error finding container e1059438e8ea3022d4cf33d65fd54c35a5ce271e081726fb9f98f4c0fec70bdb: Status 404 returned error can't find the container with id e1059438e8ea3022d4cf33d65fd54c35a5ce271e081726fb9f98f4c0fec70bdb Oct 03 08:58:28 crc kubenswrapper[4899]: I1003 08:58:28.267821 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"4a91feca-8e5c-489a-bd2b-222f17e9b6d6","Type":"ContainerStarted","Data":"e1059438e8ea3022d4cf33d65fd54c35a5ce271e081726fb9f98f4c0fec70bdb"} Oct 03 08:58:29 crc kubenswrapper[4899]: I1003 08:58:29.277474 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"4a91feca-8e5c-489a-bd2b-222f17e9b6d6","Type":"ContainerStarted","Data":"f8ba88b32c85a226f7dad4c0b6674c50cc71745cbe0b63ddae02eb319bfcd97e"} Oct 03 08:58:29 crc kubenswrapper[4899]: I1003 08:58:29.277805 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Oct 03 08:58:29 crc kubenswrapper[4899]: I1003 08:58:29.300327 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.300308632 podStartE2EDuration="2.300308632s" podCreationTimestamp="2025-10-03 08:58:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:58:29.291955949 +0000 UTC m=+1083.399440902" watchObservedRunningTime="2025-10-03 08:58:29.300308632 +0000 UTC m=+1083.407793585" Oct 03 08:58:37 crc kubenswrapper[4899]: I1003 08:58:37.690862 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.107337 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-9sndc"] Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.108659 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-9sndc" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.110465 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.110705 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.116454 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-9sndc"] Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.220337 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad3fd26b-3f3c-4c4e-be50-ce49315719f2-config-data\") pod \"nova-cell0-cell-mapping-9sndc\" (UID: \"ad3fd26b-3f3c-4c4e-be50-ce49315719f2\") " pod="openstack/nova-cell0-cell-mapping-9sndc" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.221151 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad3fd26b-3f3c-4c4e-be50-ce49315719f2-scripts\") pod \"nova-cell0-cell-mapping-9sndc\" (UID: \"ad3fd26b-3f3c-4c4e-be50-ce49315719f2\") " pod="openstack/nova-cell0-cell-mapping-9sndc" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.221308 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad3fd26b-3f3c-4c4e-be50-ce49315719f2-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-9sndc\" (UID: \"ad3fd26b-3f3c-4c4e-be50-ce49315719f2\") " pod="openstack/nova-cell0-cell-mapping-9sndc" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.221334 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nblcg\" (UniqueName: \"kubernetes.io/projected/ad3fd26b-3f3c-4c4e-be50-ce49315719f2-kube-api-access-nblcg\") pod \"nova-cell0-cell-mapping-9sndc\" (UID: \"ad3fd26b-3f3c-4c4e-be50-ce49315719f2\") " pod="openstack/nova-cell0-cell-mapping-9sndc" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.300115 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.301986 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.304153 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.314820 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.322839 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad3fd26b-3f3c-4c4e-be50-ce49315719f2-config-data\") pod \"nova-cell0-cell-mapping-9sndc\" (UID: \"ad3fd26b-3f3c-4c4e-be50-ce49315719f2\") " pod="openstack/nova-cell0-cell-mapping-9sndc" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.322876 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad3fd26b-3f3c-4c4e-be50-ce49315719f2-scripts\") pod \"nova-cell0-cell-mapping-9sndc\" (UID: \"ad3fd26b-3f3c-4c4e-be50-ce49315719f2\") " pod="openstack/nova-cell0-cell-mapping-9sndc" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.322933 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nblcg\" (UniqueName: \"kubernetes.io/projected/ad3fd26b-3f3c-4c4e-be50-ce49315719f2-kube-api-access-nblcg\") pod \"nova-cell0-cell-mapping-9sndc\" (UID: \"ad3fd26b-3f3c-4c4e-be50-ce49315719f2\") " pod="openstack/nova-cell0-cell-mapping-9sndc" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.322949 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad3fd26b-3f3c-4c4e-be50-ce49315719f2-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-9sndc\" (UID: \"ad3fd26b-3f3c-4c4e-be50-ce49315719f2\") " pod="openstack/nova-cell0-cell-mapping-9sndc" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.331827 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad3fd26b-3f3c-4c4e-be50-ce49315719f2-scripts\") pod \"nova-cell0-cell-mapping-9sndc\" (UID: \"ad3fd26b-3f3c-4c4e-be50-ce49315719f2\") " pod="openstack/nova-cell0-cell-mapping-9sndc" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.344011 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad3fd26b-3f3c-4c4e-be50-ce49315719f2-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-9sndc\" (UID: \"ad3fd26b-3f3c-4c4e-be50-ce49315719f2\") " pod="openstack/nova-cell0-cell-mapping-9sndc" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.354004 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad3fd26b-3f3c-4c4e-be50-ce49315719f2-config-data\") pod \"nova-cell0-cell-mapping-9sndc\" (UID: \"ad3fd26b-3f3c-4c4e-be50-ce49315719f2\") " pod="openstack/nova-cell0-cell-mapping-9sndc" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.372650 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nblcg\" (UniqueName: \"kubernetes.io/projected/ad3fd26b-3f3c-4c4e-be50-ce49315719f2-kube-api-access-nblcg\") pod \"nova-cell0-cell-mapping-9sndc\" (UID: \"ad3fd26b-3f3c-4c4e-be50-ce49315719f2\") " pod="openstack/nova-cell0-cell-mapping-9sndc" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.425666 4899 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6mrv\" (UniqueName: \"kubernetes.io/projected/cf7512b4-89b9-40c2-b02b-7df3a1732d86-kube-api-access-p6mrv\") pod \"nova-scheduler-0\" (UID: \"cf7512b4-89b9-40c2-b02b-7df3a1732d86\") " pod="openstack/nova-scheduler-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.425785 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf7512b4-89b9-40c2-b02b-7df3a1732d86-config-data\") pod \"nova-scheduler-0\" (UID: \"cf7512b4-89b9-40c2-b02b-7df3a1732d86\") " pod="openstack/nova-scheduler-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.425820 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf7512b4-89b9-40c2-b02b-7df3a1732d86-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cf7512b4-89b9-40c2-b02b-7df3a1732d86\") " pod="openstack/nova-scheduler-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.430545 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-9sndc" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.438327 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.440274 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.465175 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.485261 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.518566 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.520166 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.528288 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b6f9dac-6727-43a1-95ae-0682aacc9bfb-logs\") pod \"nova-metadata-0\" (UID: \"6b6f9dac-6727-43a1-95ae-0682aacc9bfb\") " pod="openstack/nova-metadata-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.528359 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b6f9dac-6727-43a1-95ae-0682aacc9bfb-config-data\") pod \"nova-metadata-0\" (UID: \"6b6f9dac-6727-43a1-95ae-0682aacc9bfb\") " pod="openstack/nova-metadata-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.528567 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8w2b\" (UniqueName: \"kubernetes.io/projected/6b6f9dac-6727-43a1-95ae-0682aacc9bfb-kube-api-access-d8w2b\") pod \"nova-metadata-0\" (UID: \"6b6f9dac-6727-43a1-95ae-0682aacc9bfb\") " pod="openstack/nova-metadata-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.528653 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6mrv\" (UniqueName: \"kubernetes.io/projected/cf7512b4-89b9-40c2-b02b-7df3a1732d86-kube-api-access-p6mrv\") pod \"nova-scheduler-0\" (UID: \"cf7512b4-89b9-40c2-b02b-7df3a1732d86\") " pod="openstack/nova-scheduler-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.528817 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b6f9dac-6727-43a1-95ae-0682aacc9bfb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6b6f9dac-6727-43a1-95ae-0682aacc9bfb\") " pod="openstack/nova-metadata-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.529015 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf7512b4-89b9-40c2-b02b-7df3a1732d86-config-data\") pod \"nova-scheduler-0\" (UID: \"cf7512b4-89b9-40c2-b02b-7df3a1732d86\") " pod="openstack/nova-scheduler-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.529060 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf7512b4-89b9-40c2-b02b-7df3a1732d86-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cf7512b4-89b9-40c2-b02b-7df3a1732d86\") " pod="openstack/nova-scheduler-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.536322 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.544091 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf7512b4-89b9-40c2-b02b-7df3a1732d86-config-data\") pod \"nova-scheduler-0\" (UID: \"cf7512b4-89b9-40c2-b02b-7df3a1732d86\") " pod="openstack/nova-scheduler-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.562024 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf7512b4-89b9-40c2-b02b-7df3a1732d86-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cf7512b4-89b9-40c2-b02b-7df3a1732d86\") " 
pod="openstack/nova-scheduler-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.585581 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6mrv\" (UniqueName: \"kubernetes.io/projected/cf7512b4-89b9-40c2-b02b-7df3a1732d86-kube-api-access-p6mrv\") pod \"nova-scheduler-0\" (UID: \"cf7512b4-89b9-40c2-b02b-7df3a1732d86\") " pod="openstack/nova-scheduler-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.605915 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.620340 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.644731 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b6f9dac-6727-43a1-95ae-0682aacc9bfb-config-data\") pod \"nova-metadata-0\" (UID: \"6b6f9dac-6727-43a1-95ae-0682aacc9bfb\") " pod="openstack/nova-metadata-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.644793 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thqrc\" (UniqueName: \"kubernetes.io/projected/04d02dab-0232-4bd2-98a2-daae43c06f84-kube-api-access-thqrc\") pod \"nova-cell1-novncproxy-0\" (UID: \"04d02dab-0232-4bd2-98a2-daae43c06f84\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.644831 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04d02dab-0232-4bd2-98a2-daae43c06f84-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"04d02dab-0232-4bd2-98a2-daae43c06f84\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.644858 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8w2b\" (UniqueName: \"kubernetes.io/projected/6b6f9dac-6727-43a1-95ae-0682aacc9bfb-kube-api-access-d8w2b\") pod \"nova-metadata-0\" (UID: \"6b6f9dac-6727-43a1-95ae-0682aacc9bfb\") " pod="openstack/nova-metadata-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.644907 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04d02dab-0232-4bd2-98a2-daae43c06f84-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"04d02dab-0232-4bd2-98a2-daae43c06f84\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.645022 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b6f9dac-6727-43a1-95ae-0682aacc9bfb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6b6f9dac-6727-43a1-95ae-0682aacc9bfb\") " pod="openstack/nova-metadata-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.645166 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b6f9dac-6727-43a1-95ae-0682aacc9bfb-logs\") pod \"nova-metadata-0\" (UID: \"6b6f9dac-6727-43a1-95ae-0682aacc9bfb\") " pod="openstack/nova-metadata-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.645731 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/6b6f9dac-6727-43a1-95ae-0682aacc9bfb-logs\") pod \"nova-metadata-0\" (UID: \"6b6f9dac-6727-43a1-95ae-0682aacc9bfb\") " pod="openstack/nova-metadata-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.649502 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b6f9dac-6727-43a1-95ae-0682aacc9bfb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6b6f9dac-6727-43a1-95ae-0682aacc9bfb\") " pod="openstack/nova-metadata-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.652724 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b6f9dac-6727-43a1-95ae-0682aacc9bfb-config-data\") pod \"nova-metadata-0\" (UID: \"6b6f9dac-6727-43a1-95ae-0682aacc9bfb\") " pod="openstack/nova-metadata-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.673168 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8w2b\" (UniqueName: \"kubernetes.io/projected/6b6f9dac-6727-43a1-95ae-0682aacc9bfb-kube-api-access-d8w2b\") pod \"nova-metadata-0\" (UID: \"6b6f9dac-6727-43a1-95ae-0682aacc9bfb\") " pod="openstack/nova-metadata-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.684519 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.686419 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.700951 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.730694 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-672tv"] Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.733282 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-672tv" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.746770 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thqrc\" (UniqueName: \"kubernetes.io/projected/04d02dab-0232-4bd2-98a2-daae43c06f84-kube-api-access-thqrc\") pod \"nova-cell1-novncproxy-0\" (UID: \"04d02dab-0232-4bd2-98a2-daae43c06f84\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.747400 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04d02dab-0232-4bd2-98a2-daae43c06f84-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"04d02dab-0232-4bd2-98a2-daae43c06f84\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.748190 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04d02dab-0232-4bd2-98a2-daae43c06f84-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"04d02dab-0232-4bd2-98a2-daae43c06f84\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.747345 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.758158 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04d02dab-0232-4bd2-98a2-daae43c06f84-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"04d02dab-0232-4bd2-98a2-daae43c06f84\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.758649 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04d02dab-0232-4bd2-98a2-daae43c06f84-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"04d02dab-0232-4bd2-98a2-daae43c06f84\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.765275 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-672tv"] Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.776318 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.780119 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thqrc\" (UniqueName: \"kubernetes.io/projected/04d02dab-0232-4bd2-98a2-daae43c06f84-kube-api-access-thqrc\") pod \"nova-cell1-novncproxy-0\" (UID: \"04d02dab-0232-4bd2-98a2-daae43c06f84\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.855732 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vknds\" (UniqueName: \"kubernetes.io/projected/5e6940c0-2f3d-4053-931b-11d67921c897-kube-api-access-vknds\") pod \"nova-api-0\" (UID: \"5e6940c0-2f3d-4053-931b-11d67921c897\") " pod="openstack/nova-api-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.855791 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-dns-svc\") pod \"dnsmasq-dns-bccf8f775-672tv\" (UID: \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\") " pod="openstack/dnsmasq-dns-bccf8f775-672tv" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.855847 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-ovsdbserver-nb\") pod \"dnsmasq-dns-bccf8f775-672tv\" (UID: \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\") " pod="openstack/dnsmasq-dns-bccf8f775-672tv" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.858572 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpxtr\" (UniqueName: \"kubernetes.io/projected/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-kube-api-access-dpxtr\") pod \"dnsmasq-dns-bccf8f775-672tv\" (UID: \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\") " pod="openstack/dnsmasq-dns-bccf8f775-672tv" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.858656 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-config\") pod \"dnsmasq-dns-bccf8f775-672tv\" (UID: \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\") " pod="openstack/dnsmasq-dns-bccf8f775-672tv" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.858847 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e6940c0-2f3d-4053-931b-11d67921c897-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5e6940c0-2f3d-4053-931b-11d67921c897\") " pod="openstack/nova-api-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.859027 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-dns-swift-storage-0\") pod \"dnsmasq-dns-bccf8f775-672tv\" (UID: \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\") " pod="openstack/dnsmasq-dns-bccf8f775-672tv" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.859178 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e6940c0-2f3d-4053-931b-11d67921c897-config-data\") pod \"nova-api-0\" (UID: 
\"5e6940c0-2f3d-4053-931b-11d67921c897\") " pod="openstack/nova-api-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.859280 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e6940c0-2f3d-4053-931b-11d67921c897-logs\") pod \"nova-api-0\" (UID: \"5e6940c0-2f3d-4053-931b-11d67921c897\") " pod="openstack/nova-api-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.859308 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-ovsdbserver-sb\") pod \"dnsmasq-dns-bccf8f775-672tv\" (UID: \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\") " pod="openstack/dnsmasq-dns-bccf8f775-672tv" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.952112 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.961502 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e6940c0-2f3d-4053-931b-11d67921c897-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5e6940c0-2f3d-4053-931b-11d67921c897\") " pod="openstack/nova-api-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.961602 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-dns-swift-storage-0\") pod \"dnsmasq-dns-bccf8f775-672tv\" (UID: \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\") " pod="openstack/dnsmasq-dns-bccf8f775-672tv" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.961677 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e6940c0-2f3d-4053-931b-11d67921c897-config-data\") pod \"nova-api-0\" (UID: \"5e6940c0-2f3d-4053-931b-11d67921c897\") " pod="openstack/nova-api-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.961727 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e6940c0-2f3d-4053-931b-11d67921c897-logs\") pod \"nova-api-0\" (UID: \"5e6940c0-2f3d-4053-931b-11d67921c897\") " pod="openstack/nova-api-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.961754 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-ovsdbserver-sb\") pod \"dnsmasq-dns-bccf8f775-672tv\" (UID: \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\") " pod="openstack/dnsmasq-dns-bccf8f775-672tv" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.961785 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vknds\" (UniqueName: \"kubernetes.io/projected/5e6940c0-2f3d-4053-931b-11d67921c897-kube-api-access-vknds\") pod \"nova-api-0\" (UID: \"5e6940c0-2f3d-4053-931b-11d67921c897\") " pod="openstack/nova-api-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.961815 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-dns-svc\") pod \"dnsmasq-dns-bccf8f775-672tv\" (UID: \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\") " 
pod="openstack/dnsmasq-dns-bccf8f775-672tv" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.961872 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-ovsdbserver-nb\") pod \"dnsmasq-dns-bccf8f775-672tv\" (UID: \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\") " pod="openstack/dnsmasq-dns-bccf8f775-672tv" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.961927 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpxtr\" (UniqueName: \"kubernetes.io/projected/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-kube-api-access-dpxtr\") pod \"dnsmasq-dns-bccf8f775-672tv\" (UID: \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\") " pod="openstack/dnsmasq-dns-bccf8f775-672tv" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.961970 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-config\") pod \"dnsmasq-dns-bccf8f775-672tv\" (UID: \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\") " pod="openstack/dnsmasq-dns-bccf8f775-672tv" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.962543 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e6940c0-2f3d-4053-931b-11d67921c897-logs\") pod \"nova-api-0\" (UID: \"5e6940c0-2f3d-4053-931b-11d67921c897\") " pod="openstack/nova-api-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.962645 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-dns-swift-storage-0\") pod \"dnsmasq-dns-bccf8f775-672tv\" (UID: \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\") " pod="openstack/dnsmasq-dns-bccf8f775-672tv" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.963026 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-config\") pod \"dnsmasq-dns-bccf8f775-672tv\" (UID: \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\") " pod="openstack/dnsmasq-dns-bccf8f775-672tv" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.963026 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-ovsdbserver-sb\") pod \"dnsmasq-dns-bccf8f775-672tv\" (UID: \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\") " pod="openstack/dnsmasq-dns-bccf8f775-672tv" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.963248 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-ovsdbserver-nb\") pod \"dnsmasq-dns-bccf8f775-672tv\" (UID: \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\") " pod="openstack/dnsmasq-dns-bccf8f775-672tv" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.963668 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-dns-svc\") pod \"dnsmasq-dns-bccf8f775-672tv\" (UID: \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\") " pod="openstack/dnsmasq-dns-bccf8f775-672tv" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.966924 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e6940c0-2f3d-4053-931b-11d67921c897-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5e6940c0-2f3d-4053-931b-11d67921c897\") " pod="openstack/nova-api-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.969529 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e6940c0-2f3d-4053-931b-11d67921c897-config-data\") pod \"nova-api-0\" (UID: \"5e6940c0-2f3d-4053-931b-11d67921c897\") " pod="openstack/nova-api-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.979492 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vknds\" (UniqueName: \"kubernetes.io/projected/5e6940c0-2f3d-4053-931b-11d67921c897-kube-api-access-vknds\") pod \"nova-api-0\" (UID: \"5e6940c0-2f3d-4053-931b-11d67921c897\") " pod="openstack/nova-api-0" Oct 03 08:58:38 crc kubenswrapper[4899]: I1003 08:58:38.982392 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpxtr\" (UniqueName: \"kubernetes.io/projected/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-kube-api-access-dpxtr\") pod \"dnsmasq-dns-bccf8f775-672tv\" (UID: \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\") " pod="openstack/dnsmasq-dns-bccf8f775-672tv" Oct 03 08:58:39 crc kubenswrapper[4899]: I1003 08:58:39.013851 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 08:58:39 crc kubenswrapper[4899]: I1003 08:58:39.075147 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-672tv" Oct 03 08:58:39 crc kubenswrapper[4899]: I1003 08:58:39.188686 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-9sndc"] Oct 03 08:58:39 crc kubenswrapper[4899]: I1003 08:58:39.202018 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 08:58:39 crc kubenswrapper[4899]: W1003 08:58:39.273426 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf7512b4_89b9_40c2_b02b_7df3a1732d86.slice/crio-c3873e1442be42f0812fbc02121932996760cf685edc74657c0e167c5ceaccc5 WatchSource:0}: Error finding container c3873e1442be42f0812fbc02121932996760cf685edc74657c0e167c5ceaccc5: Status 404 returned error can't find the container with id c3873e1442be42f0812fbc02121932996760cf685edc74657c0e167c5ceaccc5 Oct 03 08:58:39 crc kubenswrapper[4899]: I1003 08:58:39.414156 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cf7512b4-89b9-40c2-b02b-7df3a1732d86","Type":"ContainerStarted","Data":"c3873e1442be42f0812fbc02121932996760cf685edc74657c0e167c5ceaccc5"} Oct 03 08:58:39 crc kubenswrapper[4899]: I1003 08:58:39.418237 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 08:58:39 crc kubenswrapper[4899]: I1003 08:58:39.422297 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-9sndc" event={"ID":"ad3fd26b-3f3c-4c4e-be50-ce49315719f2","Type":"ContainerStarted","Data":"c324e6303ee3b69e5dc215e9fcdfe7b0cdd9d865ac6d2808020d06a10e0189dd"} Oct 03 08:58:39 crc kubenswrapper[4899]: I1003 08:58:39.518827 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-q2tr6"] Oct 03 08:58:39 crc kubenswrapper[4899]: I1003 08:58:39.520772 4899 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-q2tr6" Oct 03 08:58:39 crc kubenswrapper[4899]: I1003 08:58:39.532148 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Oct 03 08:58:39 crc kubenswrapper[4899]: I1003 08:58:39.536183 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 03 08:58:39 crc kubenswrapper[4899]: I1003 08:58:39.577005 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-q2tr6"] Oct 03 08:58:39 crc kubenswrapper[4899]: W1003 08:58:39.577082 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04d02dab_0232_4bd2_98a2_daae43c06f84.slice/crio-6e36e25b21319c14fe4e8f596666616b40f30adccbf7c05d10906056d8238137 WatchSource:0}: Error finding container 6e36e25b21319c14fe4e8f596666616b40f30adccbf7c05d10906056d8238137: Status 404 returned error can't find the container with id 6e36e25b21319c14fe4e8f596666616b40f30adccbf7c05d10906056d8238137 Oct 03 08:58:39 crc kubenswrapper[4899]: I1003 08:58:39.634882 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 08:58:39 crc kubenswrapper[4899]: I1003 08:58:39.678822 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e050af65-bad7-412a-bb6f-7e7bb65573a4-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-q2tr6\" (UID: \"e050af65-bad7-412a-bb6f-7e7bb65573a4\") " pod="openstack/nova-cell1-conductor-db-sync-q2tr6" Oct 03 08:58:39 crc kubenswrapper[4899]: I1003 08:58:39.678937 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e050af65-bad7-412a-bb6f-7e7bb65573a4-config-data\") pod \"nova-cell1-conductor-db-sync-q2tr6\" (UID: \"e050af65-bad7-412a-bb6f-7e7bb65573a4\") " pod="openstack/nova-cell1-conductor-db-sync-q2tr6" Oct 03 08:58:39 crc kubenswrapper[4899]: I1003 08:58:39.678973 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kf6k8\" (UniqueName: \"kubernetes.io/projected/e050af65-bad7-412a-bb6f-7e7bb65573a4-kube-api-access-kf6k8\") pod \"nova-cell1-conductor-db-sync-q2tr6\" (UID: \"e050af65-bad7-412a-bb6f-7e7bb65573a4\") " pod="openstack/nova-cell1-conductor-db-sync-q2tr6" Oct 03 08:58:39 crc kubenswrapper[4899]: I1003 08:58:39.679050 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e050af65-bad7-412a-bb6f-7e7bb65573a4-scripts\") pod \"nova-cell1-conductor-db-sync-q2tr6\" (UID: \"e050af65-bad7-412a-bb6f-7e7bb65573a4\") " pod="openstack/nova-cell1-conductor-db-sync-q2tr6" Oct 03 08:58:39 crc kubenswrapper[4899]: I1003 08:58:39.780440 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e050af65-bad7-412a-bb6f-7e7bb65573a4-config-data\") pod \"nova-cell1-conductor-db-sync-q2tr6\" (UID: \"e050af65-bad7-412a-bb6f-7e7bb65573a4\") " pod="openstack/nova-cell1-conductor-db-sync-q2tr6" Oct 03 08:58:39 crc kubenswrapper[4899]: I1003 08:58:39.780502 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kf6k8\" (UniqueName: 
\"kubernetes.io/projected/e050af65-bad7-412a-bb6f-7e7bb65573a4-kube-api-access-kf6k8\") pod \"nova-cell1-conductor-db-sync-q2tr6\" (UID: \"e050af65-bad7-412a-bb6f-7e7bb65573a4\") " pod="openstack/nova-cell1-conductor-db-sync-q2tr6" Oct 03 08:58:39 crc kubenswrapper[4899]: I1003 08:58:39.780572 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e050af65-bad7-412a-bb6f-7e7bb65573a4-scripts\") pod \"nova-cell1-conductor-db-sync-q2tr6\" (UID: \"e050af65-bad7-412a-bb6f-7e7bb65573a4\") " pod="openstack/nova-cell1-conductor-db-sync-q2tr6" Oct 03 08:58:39 crc kubenswrapper[4899]: I1003 08:58:39.780634 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e050af65-bad7-412a-bb6f-7e7bb65573a4-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-q2tr6\" (UID: \"e050af65-bad7-412a-bb6f-7e7bb65573a4\") " pod="openstack/nova-cell1-conductor-db-sync-q2tr6" Oct 03 08:58:39 crc kubenswrapper[4899]: I1003 08:58:39.802012 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e050af65-bad7-412a-bb6f-7e7bb65573a4-scripts\") pod \"nova-cell1-conductor-db-sync-q2tr6\" (UID: \"e050af65-bad7-412a-bb6f-7e7bb65573a4\") " pod="openstack/nova-cell1-conductor-db-sync-q2tr6" Oct 03 08:58:39 crc kubenswrapper[4899]: I1003 08:58:39.802039 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e050af65-bad7-412a-bb6f-7e7bb65573a4-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-q2tr6\" (UID: \"e050af65-bad7-412a-bb6f-7e7bb65573a4\") " pod="openstack/nova-cell1-conductor-db-sync-q2tr6" Oct 03 08:58:39 crc kubenswrapper[4899]: I1003 08:58:39.802911 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e050af65-bad7-412a-bb6f-7e7bb65573a4-config-data\") pod \"nova-cell1-conductor-db-sync-q2tr6\" (UID: \"e050af65-bad7-412a-bb6f-7e7bb65573a4\") " pod="openstack/nova-cell1-conductor-db-sync-q2tr6" Oct 03 08:58:39 crc kubenswrapper[4899]: I1003 08:58:39.804875 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kf6k8\" (UniqueName: \"kubernetes.io/projected/e050af65-bad7-412a-bb6f-7e7bb65573a4-kube-api-access-kf6k8\") pod \"nova-cell1-conductor-db-sync-q2tr6\" (UID: \"e050af65-bad7-412a-bb6f-7e7bb65573a4\") " pod="openstack/nova-cell1-conductor-db-sync-q2tr6" Oct 03 08:58:39 crc kubenswrapper[4899]: I1003 08:58:39.837988 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-672tv"] Oct 03 08:58:39 crc kubenswrapper[4899]: I1003 08:58:39.901235 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 08:58:39 crc kubenswrapper[4899]: I1003 08:58:39.933915 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-q2tr6" Oct 03 08:58:40 crc kubenswrapper[4899]: I1003 08:58:40.408475 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-q2tr6"] Oct 03 08:58:40 crc kubenswrapper[4899]: W1003 08:58:40.432340 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode050af65_bad7_412a_bb6f_7e7bb65573a4.slice/crio-12cb8b7d0b0491a08064f8c6d5d3064a364388953f607bc92b655dd92bfb262a WatchSource:0}: Error finding container 12cb8b7d0b0491a08064f8c6d5d3064a364388953f607bc92b655dd92bfb262a: Status 404 returned error can't find the container with id 12cb8b7d0b0491a08064f8c6d5d3064a364388953f607bc92b655dd92bfb262a Oct 03 08:58:40 crc kubenswrapper[4899]: I1003 08:58:40.433701 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6b6f9dac-6727-43a1-95ae-0682aacc9bfb","Type":"ContainerStarted","Data":"5d43be55d9e6dd8dade4df7d92d612d969c0d29e74fee33da2ba17f56ef1cec9"} Oct 03 08:58:40 crc kubenswrapper[4899]: I1003 08:58:40.435931 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5e6940c0-2f3d-4053-931b-11d67921c897","Type":"ContainerStarted","Data":"4cc46e1a053783ea1f1f53f210a72f2f190151890b8de8946d27daeca231a33e"} Oct 03 08:58:40 crc kubenswrapper[4899]: I1003 08:58:40.438053 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-9sndc" event={"ID":"ad3fd26b-3f3c-4c4e-be50-ce49315719f2","Type":"ContainerStarted","Data":"c37ad0691076d112c3c9c0ca730d7d97fa19cb1e772ef18b8f600b798c847111"} Oct 03 08:58:40 crc kubenswrapper[4899]: I1003 08:58:40.440773 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"04d02dab-0232-4bd2-98a2-daae43c06f84","Type":"ContainerStarted","Data":"6e36e25b21319c14fe4e8f596666616b40f30adccbf7c05d10906056d8238137"} Oct 03 08:58:40 crc kubenswrapper[4899]: I1003 08:58:40.451289 4899 generic.go:334] "Generic (PLEG): container finished" podID="f81cc11b-ac94-4a5b-bd99-50f4580e5f14" containerID="4503ef5a4a8033776f9afa6b352cd9607a1809b24f6cd3d2e3321a9a604a9b29" exitCode=0 Oct 03 08:58:40 crc kubenswrapper[4899]: I1003 08:58:40.451344 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-672tv" event={"ID":"f81cc11b-ac94-4a5b-bd99-50f4580e5f14","Type":"ContainerDied","Data":"4503ef5a4a8033776f9afa6b352cd9607a1809b24f6cd3d2e3321a9a604a9b29"} Oct 03 08:58:40 crc kubenswrapper[4899]: I1003 08:58:40.451375 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-672tv" event={"ID":"f81cc11b-ac94-4a5b-bd99-50f4580e5f14","Type":"ContainerStarted","Data":"a3d523dd4673d24e67dd40990d003fc8b207b19b9f242e39892019f380d2a33b"} Oct 03 08:58:40 crc kubenswrapper[4899]: I1003 08:58:40.473626 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-9sndc" podStartSLOduration=2.473587034 podStartE2EDuration="2.473587034s" podCreationTimestamp="2025-10-03 08:58:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:58:40.451247859 +0000 UTC m=+1094.558732812" watchObservedRunningTime="2025-10-03 08:58:40.473587034 +0000 UTC m=+1094.581072007" Oct 03 08:58:41 crc kubenswrapper[4899]: I1003 08:58:41.463089 4899 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-q2tr6" event={"ID":"e050af65-bad7-412a-bb6f-7e7bb65573a4","Type":"ContainerStarted","Data":"94d809a76375d5b33250adc68a0dd8b5db3a6cb0efd13c6fa4823522858f0951"} Oct 03 08:58:41 crc kubenswrapper[4899]: I1003 08:58:41.463412 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-q2tr6" event={"ID":"e050af65-bad7-412a-bb6f-7e7bb65573a4","Type":"ContainerStarted","Data":"12cb8b7d0b0491a08064f8c6d5d3064a364388953f607bc92b655dd92bfb262a"} Oct 03 08:58:41 crc kubenswrapper[4899]: I1003 08:58:41.472457 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-672tv" event={"ID":"f81cc11b-ac94-4a5b-bd99-50f4580e5f14","Type":"ContainerStarted","Data":"3d7f2ec9196dc1cb33179a8ef659e8b8362613f5d59296049139584d28e2a9a6"} Oct 03 08:58:41 crc kubenswrapper[4899]: I1003 08:58:41.472491 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-bccf8f775-672tv" Oct 03 08:58:41 crc kubenswrapper[4899]: I1003 08:58:41.485540 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-q2tr6" podStartSLOduration=2.485519724 podStartE2EDuration="2.485519724s" podCreationTimestamp="2025-10-03 08:58:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:58:41.477915465 +0000 UTC m=+1095.585400428" watchObservedRunningTime="2025-10-03 08:58:41.485519724 +0000 UTC m=+1095.593004697" Oct 03 08:58:41 crc kubenswrapper[4899]: I1003 08:58:41.511510 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-bccf8f775-672tv" podStartSLOduration=3.511487303 podStartE2EDuration="3.511487303s" podCreationTimestamp="2025-10-03 08:58:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:58:41.495477028 +0000 UTC m=+1095.602961981" watchObservedRunningTime="2025-10-03 08:58:41.511487303 +0000 UTC m=+1095.618972256" Oct 03 08:58:42 crc kubenswrapper[4899]: I1003 08:58:42.153921 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 08:58:42 crc kubenswrapper[4899]: I1003 08:58:42.187489 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 08:58:42 crc kubenswrapper[4899]: I1003 08:58:42.198920 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 08:58:42 crc kubenswrapper[4899]: I1003 08:58:42.198980 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 08:58:43 crc kubenswrapper[4899]: I1003 08:58:43.491266 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" 
event={"ID":"04d02dab-0232-4bd2-98a2-daae43c06f84","Type":"ContainerStarted","Data":"d3af43aa8d78309b8fc524c976cdbe7e11e59a24735adac2686c11f2b88f1d6d"} Oct 03 08:58:43 crc kubenswrapper[4899]: I1003 08:58:43.491365 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="04d02dab-0232-4bd2-98a2-daae43c06f84" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://d3af43aa8d78309b8fc524c976cdbe7e11e59a24735adac2686c11f2b88f1d6d" gracePeriod=30 Oct 03 08:58:43 crc kubenswrapper[4899]: I1003 08:58:43.493133 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cf7512b4-89b9-40c2-b02b-7df3a1732d86","Type":"ContainerStarted","Data":"7fd5a07c9e29c1b6bbaa821b320d50bbcef3bddbc61653f5b213a8357fbdf150"} Oct 03 08:58:43 crc kubenswrapper[4899]: I1003 08:58:43.494652 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6b6f9dac-6727-43a1-95ae-0682aacc9bfb","Type":"ContainerStarted","Data":"3768fe238946cc8bdd5ccab541b6087cf423d175d0ee57abfc4bb8e0736e2538"} Oct 03 08:58:43 crc kubenswrapper[4899]: I1003 08:58:43.494686 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6b6f9dac-6727-43a1-95ae-0682aacc9bfb","Type":"ContainerStarted","Data":"1d73f93d3a8594b2e03b003ff0d89a56f05c90c15682e36e53f82f34787fe47f"} Oct 03 08:58:43 crc kubenswrapper[4899]: I1003 08:58:43.494826 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6b6f9dac-6727-43a1-95ae-0682aacc9bfb" containerName="nova-metadata-log" containerID="cri-o://1d73f93d3a8594b2e03b003ff0d89a56f05c90c15682e36e53f82f34787fe47f" gracePeriod=30 Oct 03 08:58:43 crc kubenswrapper[4899]: I1003 08:58:43.494962 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6b6f9dac-6727-43a1-95ae-0682aacc9bfb" containerName="nova-metadata-metadata" containerID="cri-o://3768fe238946cc8bdd5ccab541b6087cf423d175d0ee57abfc4bb8e0736e2538" gracePeriod=30 Oct 03 08:58:43 crc kubenswrapper[4899]: I1003 08:58:43.499356 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5e6940c0-2f3d-4053-931b-11d67921c897","Type":"ContainerStarted","Data":"2cc28efb223b746b416a13f10e66e28b807d782af405476ccfb2136eb92eac65"} Oct 03 08:58:43 crc kubenswrapper[4899]: I1003 08:58:43.499406 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5e6940c0-2f3d-4053-931b-11d67921c897","Type":"ContainerStarted","Data":"7077f28a79033eb32000208b0921b073331080009398483f357b3371729b78c7"} Oct 03 08:58:43 crc kubenswrapper[4899]: I1003 08:58:43.511826 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.381555172 podStartE2EDuration="5.511804632s" podCreationTimestamp="2025-10-03 08:58:38 +0000 UTC" firstStartedPulling="2025-10-03 08:58:39.582077071 +0000 UTC m=+1093.689562024" lastFinishedPulling="2025-10-03 08:58:42.712326531 +0000 UTC m=+1096.819811484" observedRunningTime="2025-10-03 08:58:43.509569121 +0000 UTC m=+1097.617054074" watchObservedRunningTime="2025-10-03 08:58:43.511804632 +0000 UTC m=+1097.619289585" Oct 03 08:58:43 crc kubenswrapper[4899]: I1003 08:58:43.524480 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" 
podStartSLOduration=2.099291861 podStartE2EDuration="5.524461621s" podCreationTimestamp="2025-10-03 08:58:38 +0000 UTC" firstStartedPulling="2025-10-03 08:58:39.286065496 +0000 UTC m=+1093.393550449" lastFinishedPulling="2025-10-03 08:58:42.711235256 +0000 UTC m=+1096.818720209" observedRunningTime="2025-10-03 08:58:43.523549042 +0000 UTC m=+1097.631034015" watchObservedRunningTime="2025-10-03 08:58:43.524461621 +0000 UTC m=+1097.631946574" Oct 03 08:58:43 crc kubenswrapper[4899]: I1003 08:58:43.554048 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.759621244 podStartE2EDuration="5.554024074s" podCreationTimestamp="2025-10-03 08:58:38 +0000 UTC" firstStartedPulling="2025-10-03 08:58:39.91947606 +0000 UTC m=+1094.026961013" lastFinishedPulling="2025-10-03 08:58:42.71387889 +0000 UTC m=+1096.821363843" observedRunningTime="2025-10-03 08:58:43.539856707 +0000 UTC m=+1097.647341660" watchObservedRunningTime="2025-10-03 08:58:43.554024074 +0000 UTC m=+1097.661509027" Oct 03 08:58:43 crc kubenswrapper[4899]: I1003 08:58:43.579667 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.289089857 podStartE2EDuration="5.579644442s" podCreationTimestamp="2025-10-03 08:58:38 +0000 UTC" firstStartedPulling="2025-10-03 08:58:39.421556379 +0000 UTC m=+1093.529041332" lastFinishedPulling="2025-10-03 08:58:42.712110964 +0000 UTC m=+1096.819595917" observedRunningTime="2025-10-03 08:58:43.568840651 +0000 UTC m=+1097.676325604" watchObservedRunningTime="2025-10-03 08:58:43.579644442 +0000 UTC m=+1097.687129405" Oct 03 08:58:43 crc kubenswrapper[4899]: I1003 08:58:43.622004 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 03 08:58:43 crc kubenswrapper[4899]: I1003 08:58:43.777518 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 03 08:58:43 crc kubenswrapper[4899]: I1003 08:58:43.777836 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 03 08:58:43 crc kubenswrapper[4899]: I1003 08:58:43.954022 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:58:44 crc kubenswrapper[4899]: I1003 08:58:44.524813 4899 generic.go:334] "Generic (PLEG): container finished" podID="6b6f9dac-6727-43a1-95ae-0682aacc9bfb" containerID="1d73f93d3a8594b2e03b003ff0d89a56f05c90c15682e36e53f82f34787fe47f" exitCode=143 Oct 03 08:58:44 crc kubenswrapper[4899]: I1003 08:58:44.524898 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6b6f9dac-6727-43a1-95ae-0682aacc9bfb","Type":"ContainerDied","Data":"1d73f93d3a8594b2e03b003ff0d89a56f05c90c15682e36e53f82f34787fe47f"} Oct 03 08:58:47 crc kubenswrapper[4899]: I1003 08:58:47.559295 4899 generic.go:334] "Generic (PLEG): container finished" podID="ad3fd26b-3f3c-4c4e-be50-ce49315719f2" containerID="c37ad0691076d112c3c9c0ca730d7d97fa19cb1e772ef18b8f600b798c847111" exitCode=0 Oct 03 08:58:47 crc kubenswrapper[4899]: I1003 08:58:47.559373 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-9sndc" event={"ID":"ad3fd26b-3f3c-4c4e-be50-ce49315719f2","Type":"ContainerDied","Data":"c37ad0691076d112c3c9c0ca730d7d97fa19cb1e772ef18b8f600b798c847111"} Oct 03 08:58:47 crc kubenswrapper[4899]: I1003 08:58:47.562777 4899 generic.go:334] "Generic 
(PLEG): container finished" podID="e050af65-bad7-412a-bb6f-7e7bb65573a4" containerID="94d809a76375d5b33250adc68a0dd8b5db3a6cb0efd13c6fa4823522858f0951" exitCode=0 Oct 03 08:58:47 crc kubenswrapper[4899]: I1003 08:58:47.562858 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-q2tr6" event={"ID":"e050af65-bad7-412a-bb6f-7e7bb65573a4","Type":"ContainerDied","Data":"94d809a76375d5b33250adc68a0dd8b5db3a6cb0efd13c6fa4823522858f0951"} Oct 03 08:58:47 crc kubenswrapper[4899]: I1003 08:58:47.640372 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 03 08:58:48 crc kubenswrapper[4899]: I1003 08:58:48.621753 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 03 08:58:48 crc kubenswrapper[4899]: I1003 08:58:48.649342 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.014184 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.014626 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.030311 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-q2tr6" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.038844 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-9sndc" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.077012 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-bccf8f775-672tv" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.167276 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kf6k8\" (UniqueName: \"kubernetes.io/projected/e050af65-bad7-412a-bb6f-7e7bb65573a4-kube-api-access-kf6k8\") pod \"e050af65-bad7-412a-bb6f-7e7bb65573a4\" (UID: \"e050af65-bad7-412a-bb6f-7e7bb65573a4\") " Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.167369 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nblcg\" (UniqueName: \"kubernetes.io/projected/ad3fd26b-3f3c-4c4e-be50-ce49315719f2-kube-api-access-nblcg\") pod \"ad3fd26b-3f3c-4c4e-be50-ce49315719f2\" (UID: \"ad3fd26b-3f3c-4c4e-be50-ce49315719f2\") " Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.167438 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e050af65-bad7-412a-bb6f-7e7bb65573a4-config-data\") pod \"e050af65-bad7-412a-bb6f-7e7bb65573a4\" (UID: \"e050af65-bad7-412a-bb6f-7e7bb65573a4\") " Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.167466 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad3fd26b-3f3c-4c4e-be50-ce49315719f2-combined-ca-bundle\") pod \"ad3fd26b-3f3c-4c4e-be50-ce49315719f2\" (UID: \"ad3fd26b-3f3c-4c4e-be50-ce49315719f2\") " Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.167529 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/e050af65-bad7-412a-bb6f-7e7bb65573a4-combined-ca-bundle\") pod \"e050af65-bad7-412a-bb6f-7e7bb65573a4\" (UID: \"e050af65-bad7-412a-bb6f-7e7bb65573a4\") " Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.167612 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad3fd26b-3f3c-4c4e-be50-ce49315719f2-scripts\") pod \"ad3fd26b-3f3c-4c4e-be50-ce49315719f2\" (UID: \"ad3fd26b-3f3c-4c4e-be50-ce49315719f2\") " Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.167650 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e050af65-bad7-412a-bb6f-7e7bb65573a4-scripts\") pod \"e050af65-bad7-412a-bb6f-7e7bb65573a4\" (UID: \"e050af65-bad7-412a-bb6f-7e7bb65573a4\") " Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.167692 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad3fd26b-3f3c-4c4e-be50-ce49315719f2-config-data\") pod \"ad3fd26b-3f3c-4c4e-be50-ce49315719f2\" (UID: \"ad3fd26b-3f3c-4c4e-be50-ce49315719f2\") " Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.178552 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-nxxw4"] Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.180600 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" podUID="72d39196-9303-41a7-aa15-6eb7078f3b25" containerName="dnsmasq-dns" containerID="cri-o://dfece2e32f675eea8d61f56dd029aa07e09b6b957647a85c322c24e877242548" gracePeriod=10 Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.185737 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e050af65-bad7-412a-bb6f-7e7bb65573a4-kube-api-access-kf6k8" (OuterVolumeSpecName: "kube-api-access-kf6k8") pod "e050af65-bad7-412a-bb6f-7e7bb65573a4" (UID: "e050af65-bad7-412a-bb6f-7e7bb65573a4"). InnerVolumeSpecName "kube-api-access-kf6k8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.186014 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad3fd26b-3f3c-4c4e-be50-ce49315719f2-kube-api-access-nblcg" (OuterVolumeSpecName: "kube-api-access-nblcg") pod "ad3fd26b-3f3c-4c4e-be50-ce49315719f2" (UID: "ad3fd26b-3f3c-4c4e-be50-ce49315719f2"). InnerVolumeSpecName "kube-api-access-nblcg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.187769 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e050af65-bad7-412a-bb6f-7e7bb65573a4-scripts" (OuterVolumeSpecName: "scripts") pod "e050af65-bad7-412a-bb6f-7e7bb65573a4" (UID: "e050af65-bad7-412a-bb6f-7e7bb65573a4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.215511 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad3fd26b-3f3c-4c4e-be50-ce49315719f2-scripts" (OuterVolumeSpecName: "scripts") pod "ad3fd26b-3f3c-4c4e-be50-ce49315719f2" (UID: "ad3fd26b-3f3c-4c4e-be50-ce49315719f2"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.220736 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad3fd26b-3f3c-4c4e-be50-ce49315719f2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ad3fd26b-3f3c-4c4e-be50-ce49315719f2" (UID: "ad3fd26b-3f3c-4c4e-be50-ce49315719f2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.246405 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e050af65-bad7-412a-bb6f-7e7bb65573a4-config-data" (OuterVolumeSpecName: "config-data") pod "e050af65-bad7-412a-bb6f-7e7bb65573a4" (UID: "e050af65-bad7-412a-bb6f-7e7bb65573a4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.258069 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e050af65-bad7-412a-bb6f-7e7bb65573a4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e050af65-bad7-412a-bb6f-7e7bb65573a4" (UID: "e050af65-bad7-412a-bb6f-7e7bb65573a4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.274151 4899 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad3fd26b-3f3c-4c4e-be50-ce49315719f2-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.274177 4899 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e050af65-bad7-412a-bb6f-7e7bb65573a4-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.274186 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kf6k8\" (UniqueName: \"kubernetes.io/projected/e050af65-bad7-412a-bb6f-7e7bb65573a4-kube-api-access-kf6k8\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.274195 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nblcg\" (UniqueName: \"kubernetes.io/projected/ad3fd26b-3f3c-4c4e-be50-ce49315719f2-kube-api-access-nblcg\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.274205 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e050af65-bad7-412a-bb6f-7e7bb65573a4-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.274213 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad3fd26b-3f3c-4c4e-be50-ce49315719f2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.274222 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e050af65-bad7-412a-bb6f-7e7bb65573a4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.282145 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad3fd26b-3f3c-4c4e-be50-ce49315719f2-config-data" (OuterVolumeSpecName: "config-data") pod "ad3fd26b-3f3c-4c4e-be50-ce49315719f2" (UID: 
"ad3fd26b-3f3c-4c4e-be50-ce49315719f2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.376431 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad3fd26b-3f3c-4c4e-be50-ce49315719f2-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.622678 4899 generic.go:334] "Generic (PLEG): container finished" podID="72d39196-9303-41a7-aa15-6eb7078f3b25" containerID="dfece2e32f675eea8d61f56dd029aa07e09b6b957647a85c322c24e877242548" exitCode=0 Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.622734 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" event={"ID":"72d39196-9303-41a7-aa15-6eb7078f3b25","Type":"ContainerDied","Data":"dfece2e32f675eea8d61f56dd029aa07e09b6b957647a85c322c24e877242548"} Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.635671 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-9sndc" event={"ID":"ad3fd26b-3f3c-4c4e-be50-ce49315719f2","Type":"ContainerDied","Data":"c324e6303ee3b69e5dc215e9fcdfe7b0cdd9d865ac6d2808020d06a10e0189dd"} Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.635728 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c324e6303ee3b69e5dc215e9fcdfe7b0cdd9d865ac6d2808020d06a10e0189dd" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.635848 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-9sndc" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.644518 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-q2tr6" event={"ID":"e050af65-bad7-412a-bb6f-7e7bb65573a4","Type":"ContainerDied","Data":"12cb8b7d0b0491a08064f8c6d5d3064a364388953f607bc92b655dd92bfb262a"} Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.644565 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="12cb8b7d0b0491a08064f8c6d5d3064a364388953f607bc92b655dd92bfb262a" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.644661 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-q2tr6" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.651427 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.738746 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.775031 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 03 08:58:49 crc kubenswrapper[4899]: E1003 08:58:49.775613 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e050af65-bad7-412a-bb6f-7e7bb65573a4" containerName="nova-cell1-conductor-db-sync" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.775629 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="e050af65-bad7-412a-bb6f-7e7bb65573a4" containerName="nova-cell1-conductor-db-sync" Oct 03 08:58:49 crc kubenswrapper[4899]: E1003 08:58:49.775644 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72d39196-9303-41a7-aa15-6eb7078f3b25" containerName="dnsmasq-dns" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.775651 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="72d39196-9303-41a7-aa15-6eb7078f3b25" containerName="dnsmasq-dns" Oct 03 08:58:49 crc kubenswrapper[4899]: E1003 08:58:49.775684 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72d39196-9303-41a7-aa15-6eb7078f3b25" containerName="init" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.775693 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="72d39196-9303-41a7-aa15-6eb7078f3b25" containerName="init" Oct 03 08:58:49 crc kubenswrapper[4899]: E1003 08:58:49.775709 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad3fd26b-3f3c-4c4e-be50-ce49315719f2" containerName="nova-manage" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.775717 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad3fd26b-3f3c-4c4e-be50-ce49315719f2" containerName="nova-manage" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.775971 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad3fd26b-3f3c-4c4e-be50-ce49315719f2" containerName="nova-manage" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.775988 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="e050af65-bad7-412a-bb6f-7e7bb65573a4" containerName="nova-cell1-conductor-db-sync" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.776013 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="72d39196-9303-41a7-aa15-6eb7078f3b25" containerName="dnsmasq-dns" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.776830 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.781131 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.783873 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.786573 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-ovsdbserver-sb\") pod \"72d39196-9303-41a7-aa15-6eb7078f3b25\" (UID: \"72d39196-9303-41a7-aa15-6eb7078f3b25\") " Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.786664 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-config\") pod \"72d39196-9303-41a7-aa15-6eb7078f3b25\" (UID: \"72d39196-9303-41a7-aa15-6eb7078f3b25\") " Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.786774 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-dns-svc\") pod \"72d39196-9303-41a7-aa15-6eb7078f3b25\" (UID: \"72d39196-9303-41a7-aa15-6eb7078f3b25\") " Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.786819 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-dns-swift-storage-0\") pod \"72d39196-9303-41a7-aa15-6eb7078f3b25\" (UID: \"72d39196-9303-41a7-aa15-6eb7078f3b25\") " Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.786857 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-ovsdbserver-nb\") pod \"72d39196-9303-41a7-aa15-6eb7078f3b25\" (UID: \"72d39196-9303-41a7-aa15-6eb7078f3b25\") " Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.786924 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dxn4r\" (UniqueName: \"kubernetes.io/projected/72d39196-9303-41a7-aa15-6eb7078f3b25-kube-api-access-dxn4r\") pod \"72d39196-9303-41a7-aa15-6eb7078f3b25\" (UID: \"72d39196-9303-41a7-aa15-6eb7078f3b25\") " Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.802468 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72d39196-9303-41a7-aa15-6eb7078f3b25-kube-api-access-dxn4r" (OuterVolumeSpecName: "kube-api-access-dxn4r") pod "72d39196-9303-41a7-aa15-6eb7078f3b25" (UID: "72d39196-9303-41a7-aa15-6eb7078f3b25"). InnerVolumeSpecName "kube-api-access-dxn4r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.851484 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.879665 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "72d39196-9303-41a7-aa15-6eb7078f3b25" (UID: "72d39196-9303-41a7-aa15-6eb7078f3b25"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.890009 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6v8v\" (UniqueName: \"kubernetes.io/projected/d2f278b3-9210-4f40-96f3-1605efa157ef-kube-api-access-d6v8v\") pod \"nova-cell1-conductor-0\" (UID: \"d2f278b3-9210-4f40-96f3-1605efa157ef\") " pod="openstack/nova-cell1-conductor-0" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.890071 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2f278b3-9210-4f40-96f3-1605efa157ef-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"d2f278b3-9210-4f40-96f3-1605efa157ef\") " pod="openstack/nova-cell1-conductor-0" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.890105 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2f278b3-9210-4f40-96f3-1605efa157ef-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"d2f278b3-9210-4f40-96f3-1605efa157ef\") " pod="openstack/nova-cell1-conductor-0" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.890175 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dxn4r\" (UniqueName: \"kubernetes.io/projected/72d39196-9303-41a7-aa15-6eb7078f3b25-kube-api-access-dxn4r\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.890191 4899 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.901075 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "72d39196-9303-41a7-aa15-6eb7078f3b25" (UID: "72d39196-9303-41a7-aa15-6eb7078f3b25"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.902271 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "72d39196-9303-41a7-aa15-6eb7078f3b25" (UID: "72d39196-9303-41a7-aa15-6eb7078f3b25"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.907438 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "72d39196-9303-41a7-aa15-6eb7078f3b25" (UID: "72d39196-9303-41a7-aa15-6eb7078f3b25"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.910493 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-config" (OuterVolumeSpecName: "config") pod "72d39196-9303-41a7-aa15-6eb7078f3b25" (UID: "72d39196-9303-41a7-aa15-6eb7078f3b25"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.991243 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2f278b3-9210-4f40-96f3-1605efa157ef-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"d2f278b3-9210-4f40-96f3-1605efa157ef\") " pod="openstack/nova-cell1-conductor-0" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.991490 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6v8v\" (UniqueName: \"kubernetes.io/projected/d2f278b3-9210-4f40-96f3-1605efa157ef-kube-api-access-d6v8v\") pod \"nova-cell1-conductor-0\" (UID: \"d2f278b3-9210-4f40-96f3-1605efa157ef\") " pod="openstack/nova-cell1-conductor-0" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.991527 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2f278b3-9210-4f40-96f3-1605efa157ef-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"d2f278b3-9210-4f40-96f3-1605efa157ef\") " pod="openstack/nova-cell1-conductor-0" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.991587 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.991605 4899 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.991615 4899 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.991624 4899 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72d39196-9303-41a7-aa15-6eb7078f3b25-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.996577 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2f278b3-9210-4f40-96f3-1605efa157ef-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"d2f278b3-9210-4f40-96f3-1605efa157ef\") " pod="openstack/nova-cell1-conductor-0" Oct 03 08:58:49 crc kubenswrapper[4899]: I1003 08:58:49.996683 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2f278b3-9210-4f40-96f3-1605efa157ef-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"d2f278b3-9210-4f40-96f3-1605efa157ef\") " pod="openstack/nova-cell1-conductor-0" Oct 03 08:58:50 crc kubenswrapper[4899]: I1003 08:58:50.014527 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6v8v\" (UniqueName: \"kubernetes.io/projected/d2f278b3-9210-4f40-96f3-1605efa157ef-kube-api-access-d6v8v\") pod \"nova-cell1-conductor-0\" (UID: \"d2f278b3-9210-4f40-96f3-1605efa157ef\") " pod="openstack/nova-cell1-conductor-0" Oct 03 08:58:50 crc kubenswrapper[4899]: I1003 08:58:50.096115 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" 
podUID="5e6940c0-2f3d-4053-931b-11d67921c897" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 08:58:50 crc kubenswrapper[4899]: I1003 08:58:50.096134 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="5e6940c0-2f3d-4053-931b-11d67921c897" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 08:58:50 crc kubenswrapper[4899]: I1003 08:58:50.120444 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 03 08:58:50 crc kubenswrapper[4899]: I1003 08:58:50.466567 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 08:58:50 crc kubenswrapper[4899]: I1003 08:58:50.657859 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 03 08:58:50 crc kubenswrapper[4899]: I1003 08:58:50.659205 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" event={"ID":"72d39196-9303-41a7-aa15-6eb7078f3b25","Type":"ContainerDied","Data":"39058808ec53cce944ec76e418ca4e9a629f0bfda817bbd6ebad20f6e0de45b3"} Oct 03 08:58:50 crc kubenswrapper[4899]: I1003 08:58:50.659258 4899 scope.go:117] "RemoveContainer" containerID="dfece2e32f675eea8d61f56dd029aa07e09b6b957647a85c322c24e877242548" Oct 03 08:58:50 crc kubenswrapper[4899]: I1003 08:58:50.659302 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-nxxw4" Oct 03 08:58:50 crc kubenswrapper[4899]: I1003 08:58:50.659680 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="5e6940c0-2f3d-4053-931b-11d67921c897" containerName="nova-api-log" containerID="cri-o://7077f28a79033eb32000208b0921b073331080009398483f357b3371729b78c7" gracePeriod=30 Oct 03 08:58:50 crc kubenswrapper[4899]: I1003 08:58:50.660095 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="5e6940c0-2f3d-4053-931b-11d67921c897" containerName="nova-api-api" containerID="cri-o://2cc28efb223b746b416a13f10e66e28b807d782af405476ccfb2136eb92eac65" gracePeriod=30 Oct 03 08:58:50 crc kubenswrapper[4899]: I1003 08:58:50.687836 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-nxxw4"] Oct 03 08:58:50 crc kubenswrapper[4899]: I1003 08:58:50.701105 4899 scope.go:117] "RemoveContainer" containerID="e65245adac283275d339b23ea90eff337b3d2abe1d09e93a4b0d3d164b8de41e" Oct 03 08:58:50 crc kubenswrapper[4899]: I1003 08:58:50.705196 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-nxxw4"] Oct 03 08:58:51 crc kubenswrapper[4899]: I1003 08:58:51.669537 4899 generic.go:334] "Generic (PLEG): container finished" podID="5e6940c0-2f3d-4053-931b-11d67921c897" containerID="7077f28a79033eb32000208b0921b073331080009398483f357b3371729b78c7" exitCode=143 Oct 03 08:58:51 crc kubenswrapper[4899]: I1003 08:58:51.669950 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5e6940c0-2f3d-4053-931b-11d67921c897","Type":"ContainerDied","Data":"7077f28a79033eb32000208b0921b073331080009398483f357b3371729b78c7"} Oct 03 08:58:51 crc kubenswrapper[4899]: I1003 08:58:51.689731 
4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="cf7512b4-89b9-40c2-b02b-7df3a1732d86" containerName="nova-scheduler-scheduler" containerID="cri-o://7fd5a07c9e29c1b6bbaa821b320d50bbcef3bddbc61653f5b213a8357fbdf150" gracePeriod=30 Oct 03 08:58:51 crc kubenswrapper[4899]: I1003 08:58:51.689834 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"d2f278b3-9210-4f40-96f3-1605efa157ef","Type":"ContainerStarted","Data":"1e65c94454d56e69369546707f7810874516d2cdc175ccd15778454adb86f556"} Oct 03 08:58:51 crc kubenswrapper[4899]: I1003 08:58:51.689859 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"d2f278b3-9210-4f40-96f3-1605efa157ef","Type":"ContainerStarted","Data":"7373a43ec93c7fb8cda385f4df69628d64776455fb22c8e053637d3a8401948d"} Oct 03 08:58:51 crc kubenswrapper[4899]: I1003 08:58:51.690324 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Oct 03 08:58:51 crc kubenswrapper[4899]: I1003 08:58:51.718461 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.718445816 podStartE2EDuration="2.718445816s" podCreationTimestamp="2025-10-03 08:58:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:58:51.710847395 +0000 UTC m=+1105.818332428" watchObservedRunningTime="2025-10-03 08:58:51.718445816 +0000 UTC m=+1105.825930769" Oct 03 08:58:52 crc kubenswrapper[4899]: I1003 08:58:52.540377 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72d39196-9303-41a7-aa15-6eb7078f3b25" path="/var/lib/kubelet/pods/72d39196-9303-41a7-aa15-6eb7078f3b25/volumes" Oct 03 08:58:52 crc kubenswrapper[4899]: I1003 08:58:52.754925 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 08:58:52 crc kubenswrapper[4899]: I1003 08:58:52.755150 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="d6a2ca35-de4c-429e-9217-5047b31741ad" containerName="kube-state-metrics" containerID="cri-o://e5e5dd3eac5f7c8b5bb9c58edb0885647f3f7a07220ffd557c4891fa7ab8e36b" gracePeriod=30 Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.262126 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.357751 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lcppj\" (UniqueName: \"kubernetes.io/projected/d6a2ca35-de4c-429e-9217-5047b31741ad-kube-api-access-lcppj\") pod \"d6a2ca35-de4c-429e-9217-5047b31741ad\" (UID: \"d6a2ca35-de4c-429e-9217-5047b31741ad\") " Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.365983 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6a2ca35-de4c-429e-9217-5047b31741ad-kube-api-access-lcppj" (OuterVolumeSpecName: "kube-api-access-lcppj") pod "d6a2ca35-de4c-429e-9217-5047b31741ad" (UID: "d6a2ca35-de4c-429e-9217-5047b31741ad"). InnerVolumeSpecName "kube-api-access-lcppj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.462191 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lcppj\" (UniqueName: \"kubernetes.io/projected/d6a2ca35-de4c-429e-9217-5047b31741ad-kube-api-access-lcppj\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:53 crc kubenswrapper[4899]: E1003 08:58:53.624717 4899 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7fd5a07c9e29c1b6bbaa821b320d50bbcef3bddbc61653f5b213a8357fbdf150" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 03 08:58:53 crc kubenswrapper[4899]: E1003 08:58:53.627173 4899 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7fd5a07c9e29c1b6bbaa821b320d50bbcef3bddbc61653f5b213a8357fbdf150" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 03 08:58:53 crc kubenswrapper[4899]: E1003 08:58:53.628815 4899 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7fd5a07c9e29c1b6bbaa821b320d50bbcef3bddbc61653f5b213a8357fbdf150" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 03 08:58:53 crc kubenswrapper[4899]: E1003 08:58:53.628852 4899 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="cf7512b4-89b9-40c2-b02b-7df3a1732d86" containerName="nova-scheduler-scheduler" Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.712332 4899 generic.go:334] "Generic (PLEG): container finished" podID="d6a2ca35-de4c-429e-9217-5047b31741ad" containerID="e5e5dd3eac5f7c8b5bb9c58edb0885647f3f7a07220ffd557c4891fa7ab8e36b" exitCode=2 Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.712417 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d6a2ca35-de4c-429e-9217-5047b31741ad","Type":"ContainerDied","Data":"e5e5dd3eac5f7c8b5bb9c58edb0885647f3f7a07220ffd557c4891fa7ab8e36b"} Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.712465 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.712969 4899 scope.go:117] "RemoveContainer" containerID="e5e5dd3eac5f7c8b5bb9c58edb0885647f3f7a07220ffd557c4891fa7ab8e36b" Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.712851 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d6a2ca35-de4c-429e-9217-5047b31741ad","Type":"ContainerDied","Data":"8c3a5186ea5cdd8a0a57967437f4f3e24fbdffe56438a8088bf63b656f6bba3d"} Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.748087 4899 scope.go:117] "RemoveContainer" containerID="e5e5dd3eac5f7c8b5bb9c58edb0885647f3f7a07220ffd557c4891fa7ab8e36b" Oct 03 08:58:53 crc kubenswrapper[4899]: E1003 08:58:53.748736 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5e5dd3eac5f7c8b5bb9c58edb0885647f3f7a07220ffd557c4891fa7ab8e36b\": container with ID starting with e5e5dd3eac5f7c8b5bb9c58edb0885647f3f7a07220ffd557c4891fa7ab8e36b not found: ID does not exist" containerID="e5e5dd3eac5f7c8b5bb9c58edb0885647f3f7a07220ffd557c4891fa7ab8e36b" Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.748774 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5e5dd3eac5f7c8b5bb9c58edb0885647f3f7a07220ffd557c4891fa7ab8e36b"} err="failed to get container status \"e5e5dd3eac5f7c8b5bb9c58edb0885647f3f7a07220ffd557c4891fa7ab8e36b\": rpc error: code = NotFound desc = could not find container \"e5e5dd3eac5f7c8b5bb9c58edb0885647f3f7a07220ffd557c4891fa7ab8e36b\": container with ID starting with e5e5dd3eac5f7c8b5bb9c58edb0885647f3f7a07220ffd557c4891fa7ab8e36b not found: ID does not exist" Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.752707 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.765958 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.775955 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 08:58:53 crc kubenswrapper[4899]: E1003 08:58:53.776471 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6a2ca35-de4c-429e-9217-5047b31741ad" containerName="kube-state-metrics" Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.776507 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6a2ca35-de4c-429e-9217-5047b31741ad" containerName="kube-state-metrics" Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.776745 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6a2ca35-de4c-429e-9217-5047b31741ad" containerName="kube-state-metrics" Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.778500 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.782923 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.783005 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.803093 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.868450 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c05f2aa3-2568-45fa-ad1c-704870317a49-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"c05f2aa3-2568-45fa-ad1c-704870317a49\") " pod="openstack/kube-state-metrics-0" Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.868503 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/c05f2aa3-2568-45fa-ad1c-704870317a49-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"c05f2aa3-2568-45fa-ad1c-704870317a49\") " pod="openstack/kube-state-metrics-0" Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.868636 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/c05f2aa3-2568-45fa-ad1c-704870317a49-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"c05f2aa3-2568-45fa-ad1c-704870317a49\") " pod="openstack/kube-state-metrics-0" Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.868658 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvr7m\" (UniqueName: \"kubernetes.io/projected/c05f2aa3-2568-45fa-ad1c-704870317a49-kube-api-access-jvr7m\") pod \"kube-state-metrics-0\" (UID: \"c05f2aa3-2568-45fa-ad1c-704870317a49\") " pod="openstack/kube-state-metrics-0" Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.970278 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/c05f2aa3-2568-45fa-ad1c-704870317a49-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"c05f2aa3-2568-45fa-ad1c-704870317a49\") " pod="openstack/kube-state-metrics-0" Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.970349 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvr7m\" (UniqueName: \"kubernetes.io/projected/c05f2aa3-2568-45fa-ad1c-704870317a49-kube-api-access-jvr7m\") pod \"kube-state-metrics-0\" (UID: \"c05f2aa3-2568-45fa-ad1c-704870317a49\") " pod="openstack/kube-state-metrics-0" Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.970817 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c05f2aa3-2568-45fa-ad1c-704870317a49-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"c05f2aa3-2568-45fa-ad1c-704870317a49\") " pod="openstack/kube-state-metrics-0" Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.971168 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/c05f2aa3-2568-45fa-ad1c-704870317a49-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"c05f2aa3-2568-45fa-ad1c-704870317a49\") " pod="openstack/kube-state-metrics-0" Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.975611 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/c05f2aa3-2568-45fa-ad1c-704870317a49-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"c05f2aa3-2568-45fa-ad1c-704870317a49\") " pod="openstack/kube-state-metrics-0" Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.975761 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/c05f2aa3-2568-45fa-ad1c-704870317a49-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"c05f2aa3-2568-45fa-ad1c-704870317a49\") " pod="openstack/kube-state-metrics-0" Oct 03 08:58:53 crc kubenswrapper[4899]: I1003 08:58:53.978658 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c05f2aa3-2568-45fa-ad1c-704870317a49-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"c05f2aa3-2568-45fa-ad1c-704870317a49\") " pod="openstack/kube-state-metrics-0" Oct 03 08:58:54 crc kubenswrapper[4899]: I1003 08:58:54.008331 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvr7m\" (UniqueName: \"kubernetes.io/projected/c05f2aa3-2568-45fa-ad1c-704870317a49-kube-api-access-jvr7m\") pod \"kube-state-metrics-0\" (UID: \"c05f2aa3-2568-45fa-ad1c-704870317a49\") " pod="openstack/kube-state-metrics-0" Oct 03 08:58:54 crc kubenswrapper[4899]: I1003 08:58:54.098734 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 03 08:58:54 crc kubenswrapper[4899]: I1003 08:58:54.541029 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6a2ca35-de4c-429e-9217-5047b31741ad" path="/var/lib/kubelet/pods/d6a2ca35-de4c-429e-9217-5047b31741ad/volumes" Oct 03 08:58:54 crc kubenswrapper[4899]: I1003 08:58:54.566650 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 08:58:54 crc kubenswrapper[4899]: W1003 08:58:54.574671 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc05f2aa3_2568_45fa_ad1c_704870317a49.slice/crio-501c802bbc9b802c79270241c45ba55694a476f1857b68d1c8019b2bff382670 WatchSource:0}: Error finding container 501c802bbc9b802c79270241c45ba55694a476f1857b68d1c8019b2bff382670: Status 404 returned error can't find the container with id 501c802bbc9b802c79270241c45ba55694a476f1857b68d1c8019b2bff382670 Oct 03 08:58:54 crc kubenswrapper[4899]: I1003 08:58:54.721563 4899 generic.go:334] "Generic (PLEG): container finished" podID="cf7512b4-89b9-40c2-b02b-7df3a1732d86" containerID="7fd5a07c9e29c1b6bbaa821b320d50bbcef3bddbc61653f5b213a8357fbdf150" exitCode=0 Oct 03 08:58:54 crc kubenswrapper[4899]: I1003 08:58:54.721637 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cf7512b4-89b9-40c2-b02b-7df3a1732d86","Type":"ContainerDied","Data":"7fd5a07c9e29c1b6bbaa821b320d50bbcef3bddbc61653f5b213a8357fbdf150"} Oct 03 08:58:54 crc kubenswrapper[4899]: I1003 08:58:54.725163 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"c05f2aa3-2568-45fa-ad1c-704870317a49","Type":"ContainerStarted","Data":"501c802bbc9b802c79270241c45ba55694a476f1857b68d1c8019b2bff382670"} Oct 03 08:58:54 crc kubenswrapper[4899]: I1003 08:58:54.885178 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:58:54 crc kubenswrapper[4899]: I1003 08:58:54.885452 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="833e2f10-2b54-4a61-a4ff-6e295668bca9" containerName="ceilometer-central-agent" containerID="cri-o://6001d54af0e93a4e9dcd5815009d06519c4ea2e1817dc5bc5ffc057d4a40d01b" gracePeriod=30 Oct 03 08:58:54 crc kubenswrapper[4899]: I1003 08:58:54.885603 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="833e2f10-2b54-4a61-a4ff-6e295668bca9" containerName="proxy-httpd" containerID="cri-o://f5a8aaa3c7949b91ff3740ff91eb325a0e3d9c6ee500f3e2f81e939589ffa6c5" gracePeriod=30 Oct 03 08:58:54 crc kubenswrapper[4899]: I1003 08:58:54.885655 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="833e2f10-2b54-4a61-a4ff-6e295668bca9" containerName="sg-core" containerID="cri-o://47c74d771242b8f8264859afbb6e6db38ef03dcaf80955908d72372df7f3d0d1" gracePeriod=30 Oct 03 08:58:54 crc kubenswrapper[4899]: I1003 08:58:54.885693 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="833e2f10-2b54-4a61-a4ff-6e295668bca9" containerName="ceilometer-notification-agent" containerID="cri-o://fb7c18b6e87c4efb15351aff3c6e54a4e8c1d9f3997d89ffe943ba314389da83" gracePeriod=30 Oct 03 08:58:54 crc kubenswrapper[4899]: I1003 08:58:54.973042 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.093568 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf7512b4-89b9-40c2-b02b-7df3a1732d86-config-data\") pod \"cf7512b4-89b9-40c2-b02b-7df3a1732d86\" (UID: \"cf7512b4-89b9-40c2-b02b-7df3a1732d86\") " Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.093973 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6mrv\" (UniqueName: \"kubernetes.io/projected/cf7512b4-89b9-40c2-b02b-7df3a1732d86-kube-api-access-p6mrv\") pod \"cf7512b4-89b9-40c2-b02b-7df3a1732d86\" (UID: \"cf7512b4-89b9-40c2-b02b-7df3a1732d86\") " Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.094029 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf7512b4-89b9-40c2-b02b-7df3a1732d86-combined-ca-bundle\") pod \"cf7512b4-89b9-40c2-b02b-7df3a1732d86\" (UID: \"cf7512b4-89b9-40c2-b02b-7df3a1732d86\") " Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.099947 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf7512b4-89b9-40c2-b02b-7df3a1732d86-kube-api-access-p6mrv" (OuterVolumeSpecName: "kube-api-access-p6mrv") pod "cf7512b4-89b9-40c2-b02b-7df3a1732d86" (UID: "cf7512b4-89b9-40c2-b02b-7df3a1732d86"). InnerVolumeSpecName "kube-api-access-p6mrv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.121541 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf7512b4-89b9-40c2-b02b-7df3a1732d86-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cf7512b4-89b9-40c2-b02b-7df3a1732d86" (UID: "cf7512b4-89b9-40c2-b02b-7df3a1732d86"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.124987 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf7512b4-89b9-40c2-b02b-7df3a1732d86-config-data" (OuterVolumeSpecName: "config-data") pod "cf7512b4-89b9-40c2-b02b-7df3a1732d86" (UID: "cf7512b4-89b9-40c2-b02b-7df3a1732d86"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.164447 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.196428 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf7512b4-89b9-40c2-b02b-7df3a1732d86-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.196466 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6mrv\" (UniqueName: \"kubernetes.io/projected/cf7512b4-89b9-40c2-b02b-7df3a1732d86-kube-api-access-p6mrv\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.196477 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf7512b4-89b9-40c2-b02b-7df3a1732d86-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.736472 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"c05f2aa3-2568-45fa-ad1c-704870317a49","Type":"ContainerStarted","Data":"7c0462dc5bf4098e664005e1abb80037bbd7fd09e2a1702bff05a9162782b8b9"} Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.737396 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.738813 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cf7512b4-89b9-40c2-b02b-7df3a1732d86","Type":"ContainerDied","Data":"c3873e1442be42f0812fbc02121932996760cf685edc74657c0e167c5ceaccc5"} Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.738841 4899 scope.go:117] "RemoveContainer" containerID="7fd5a07c9e29c1b6bbaa821b320d50bbcef3bddbc61653f5b213a8357fbdf150" Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.738921 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.753698 4899 generic.go:334] "Generic (PLEG): container finished" podID="833e2f10-2b54-4a61-a4ff-6e295668bca9" containerID="f5a8aaa3c7949b91ff3740ff91eb325a0e3d9c6ee500f3e2f81e939589ffa6c5" exitCode=0 Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.753730 4899 generic.go:334] "Generic (PLEG): container finished" podID="833e2f10-2b54-4a61-a4ff-6e295668bca9" containerID="47c74d771242b8f8264859afbb6e6db38ef03dcaf80955908d72372df7f3d0d1" exitCode=2 Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.753740 4899 generic.go:334] "Generic (PLEG): container finished" podID="833e2f10-2b54-4a61-a4ff-6e295668bca9" containerID="6001d54af0e93a4e9dcd5815009d06519c4ea2e1817dc5bc5ffc057d4a40d01b" exitCode=0 Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.753743 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"833e2f10-2b54-4a61-a4ff-6e295668bca9","Type":"ContainerDied","Data":"f5a8aaa3c7949b91ff3740ff91eb325a0e3d9c6ee500f3e2f81e939589ffa6c5"} Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.753790 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"833e2f10-2b54-4a61-a4ff-6e295668bca9","Type":"ContainerDied","Data":"47c74d771242b8f8264859afbb6e6db38ef03dcaf80955908d72372df7f3d0d1"} Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.753800 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"833e2f10-2b54-4a61-a4ff-6e295668bca9","Type":"ContainerDied","Data":"6001d54af0e93a4e9dcd5815009d06519c4ea2e1817dc5bc5ffc057d4a40d01b"} Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.771844 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.314829183 podStartE2EDuration="2.771829856s" podCreationTimestamp="2025-10-03 08:58:53 +0000 UTC" firstStartedPulling="2025-10-03 08:58:54.577105972 +0000 UTC m=+1108.684590925" lastFinishedPulling="2025-10-03 08:58:55.034106645 +0000 UTC m=+1109.141591598" observedRunningTime="2025-10-03 08:58:55.764256967 +0000 UTC m=+1109.871741910" watchObservedRunningTime="2025-10-03 08:58:55.771829856 +0000 UTC m=+1109.879314809" Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.783978 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.792393 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.801679 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 08:58:55 crc kubenswrapper[4899]: E1003 08:58:55.802129 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf7512b4-89b9-40c2-b02b-7df3a1732d86" containerName="nova-scheduler-scheduler" Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.802148 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf7512b4-89b9-40c2-b02b-7df3a1732d86" containerName="nova-scheduler-scheduler" Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.802311 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf7512b4-89b9-40c2-b02b-7df3a1732d86" containerName="nova-scheduler-scheduler" Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.802942 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.808274 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.816948 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.910867 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgdpk\" (UniqueName: \"kubernetes.io/projected/b3ab645e-4533-4b5f-851f-5a8b09c6dda3-kube-api-access-sgdpk\") pod \"nova-scheduler-0\" (UID: \"b3ab645e-4533-4b5f-851f-5a8b09c6dda3\") " pod="openstack/nova-scheduler-0" Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.911984 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3ab645e-4533-4b5f-851f-5a8b09c6dda3-config-data\") pod \"nova-scheduler-0\" (UID: \"b3ab645e-4533-4b5f-851f-5a8b09c6dda3\") " pod="openstack/nova-scheduler-0" Oct 03 08:58:55 crc kubenswrapper[4899]: I1003 08:58:55.912168 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3ab645e-4533-4b5f-851f-5a8b09c6dda3-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b3ab645e-4533-4b5f-851f-5a8b09c6dda3\") " pod="openstack/nova-scheduler-0" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.014392 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgdpk\" (UniqueName: \"kubernetes.io/projected/b3ab645e-4533-4b5f-851f-5a8b09c6dda3-kube-api-access-sgdpk\") pod \"nova-scheduler-0\" (UID: \"b3ab645e-4533-4b5f-851f-5a8b09c6dda3\") " pod="openstack/nova-scheduler-0" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.014765 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3ab645e-4533-4b5f-851f-5a8b09c6dda3-config-data\") pod \"nova-scheduler-0\" (UID: \"b3ab645e-4533-4b5f-851f-5a8b09c6dda3\") " pod="openstack/nova-scheduler-0" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.014847 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3ab645e-4533-4b5f-851f-5a8b09c6dda3-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b3ab645e-4533-4b5f-851f-5a8b09c6dda3\") " pod="openstack/nova-scheduler-0" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.021615 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3ab645e-4533-4b5f-851f-5a8b09c6dda3-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b3ab645e-4533-4b5f-851f-5a8b09c6dda3\") " pod="openstack/nova-scheduler-0" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.021659 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3ab645e-4533-4b5f-851f-5a8b09c6dda3-config-data\") pod \"nova-scheduler-0\" (UID: \"b3ab645e-4533-4b5f-851f-5a8b09c6dda3\") " pod="openstack/nova-scheduler-0" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.029373 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgdpk\" (UniqueName: 
\"kubernetes.io/projected/b3ab645e-4533-4b5f-851f-5a8b09c6dda3-kube-api-access-sgdpk\") pod \"nova-scheduler-0\" (UID: \"b3ab645e-4533-4b5f-851f-5a8b09c6dda3\") " pod="openstack/nova-scheduler-0" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.125259 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.512529 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.543314 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf7512b4-89b9-40c2-b02b-7df3a1732d86" path="/var/lib/kubelet/pods/cf7512b4-89b9-40c2-b02b-7df3a1732d86/volumes" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.626421 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vknds\" (UniqueName: \"kubernetes.io/projected/5e6940c0-2f3d-4053-931b-11d67921c897-kube-api-access-vknds\") pod \"5e6940c0-2f3d-4053-931b-11d67921c897\" (UID: \"5e6940c0-2f3d-4053-931b-11d67921c897\") " Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.626596 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e6940c0-2f3d-4053-931b-11d67921c897-config-data\") pod \"5e6940c0-2f3d-4053-931b-11d67921c897\" (UID: \"5e6940c0-2f3d-4053-931b-11d67921c897\") " Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.626643 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e6940c0-2f3d-4053-931b-11d67921c897-logs\") pod \"5e6940c0-2f3d-4053-931b-11d67921c897\" (UID: \"5e6940c0-2f3d-4053-931b-11d67921c897\") " Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.626716 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e6940c0-2f3d-4053-931b-11d67921c897-combined-ca-bundle\") pod \"5e6940c0-2f3d-4053-931b-11d67921c897\" (UID: \"5e6940c0-2f3d-4053-931b-11d67921c897\") " Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.627237 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e6940c0-2f3d-4053-931b-11d67921c897-logs" (OuterVolumeSpecName: "logs") pod "5e6940c0-2f3d-4053-931b-11d67921c897" (UID: "5e6940c0-2f3d-4053-931b-11d67921c897"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.632886 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e6940c0-2f3d-4053-931b-11d67921c897-kube-api-access-vknds" (OuterVolumeSpecName: "kube-api-access-vknds") pod "5e6940c0-2f3d-4053-931b-11d67921c897" (UID: "5e6940c0-2f3d-4053-931b-11d67921c897"). InnerVolumeSpecName "kube-api-access-vknds". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.638564 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.658706 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e6940c0-2f3d-4053-931b-11d67921c897-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5e6940c0-2f3d-4053-931b-11d67921c897" (UID: "5e6940c0-2f3d-4053-931b-11d67921c897"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.659567 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e6940c0-2f3d-4053-931b-11d67921c897-config-data" (OuterVolumeSpecName: "config-data") pod "5e6940c0-2f3d-4053-931b-11d67921c897" (UID: "5e6940c0-2f3d-4053-931b-11d67921c897"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.729944 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vknds\" (UniqueName: \"kubernetes.io/projected/5e6940c0-2f3d-4053-931b-11d67921c897-kube-api-access-vknds\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.729979 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e6940c0-2f3d-4053-931b-11d67921c897-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.729989 4899 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e6940c0-2f3d-4053-931b-11d67921c897-logs\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.729998 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e6940c0-2f3d-4053-931b-11d67921c897-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.763584 4899 generic.go:334] "Generic (PLEG): container finished" podID="5e6940c0-2f3d-4053-931b-11d67921c897" containerID="2cc28efb223b746b416a13f10e66e28b807d782af405476ccfb2136eb92eac65" exitCode=0 Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.763628 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.763628 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5e6940c0-2f3d-4053-931b-11d67921c897","Type":"ContainerDied","Data":"2cc28efb223b746b416a13f10e66e28b807d782af405476ccfb2136eb92eac65"} Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.763696 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5e6940c0-2f3d-4053-931b-11d67921c897","Type":"ContainerDied","Data":"4cc46e1a053783ea1f1f53f210a72f2f190151890b8de8946d27daeca231a33e"} Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.763714 4899 scope.go:117] "RemoveContainer" containerID="2cc28efb223b746b416a13f10e66e28b807d782af405476ccfb2136eb92eac65" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.765145 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b3ab645e-4533-4b5f-851f-5a8b09c6dda3","Type":"ContainerStarted","Data":"97187cbf9c95e31b16fdfda3a9c24b9b63c4c3f1f270723f8766fdbe8bc18f00"} Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.794212 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.794390 4899 scope.go:117] "RemoveContainer" containerID="7077f28a79033eb32000208b0921b073331080009398483f357b3371729b78c7" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.807467 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.824832 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 03 08:58:56 crc kubenswrapper[4899]: E1003 08:58:56.836430 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e6940c0-2f3d-4053-931b-11d67921c897" containerName="nova-api-log" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.836465 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e6940c0-2f3d-4053-931b-11d67921c897" containerName="nova-api-log" Oct 03 08:58:56 crc kubenswrapper[4899]: E1003 08:58:56.836512 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e6940c0-2f3d-4053-931b-11d67921c897" containerName="nova-api-api" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.836520 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e6940c0-2f3d-4053-931b-11d67921c897" containerName="nova-api-api" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.836699 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e6940c0-2f3d-4053-931b-11d67921c897" containerName="nova-api-log" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.836712 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e6940c0-2f3d-4053-931b-11d67921c897" containerName="nova-api-api" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.838306 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.840968 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.847603 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.867577 4899 scope.go:117] "RemoveContainer" containerID="2cc28efb223b746b416a13f10e66e28b807d782af405476ccfb2136eb92eac65" Oct 03 08:58:56 crc kubenswrapper[4899]: E1003 08:58:56.880024 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2cc28efb223b746b416a13f10e66e28b807d782af405476ccfb2136eb92eac65\": container with ID starting with 2cc28efb223b746b416a13f10e66e28b807d782af405476ccfb2136eb92eac65 not found: ID does not exist" containerID="2cc28efb223b746b416a13f10e66e28b807d782af405476ccfb2136eb92eac65" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.880274 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2cc28efb223b746b416a13f10e66e28b807d782af405476ccfb2136eb92eac65"} err="failed to get container status \"2cc28efb223b746b416a13f10e66e28b807d782af405476ccfb2136eb92eac65\": rpc error: code = NotFound desc = could not find container \"2cc28efb223b746b416a13f10e66e28b807d782af405476ccfb2136eb92eac65\": container with ID starting with 2cc28efb223b746b416a13f10e66e28b807d782af405476ccfb2136eb92eac65 not found: ID does not exist" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.880371 4899 scope.go:117] "RemoveContainer" containerID="7077f28a79033eb32000208b0921b073331080009398483f357b3371729b78c7" Oct 03 08:58:56 crc kubenswrapper[4899]: E1003 08:58:56.882735 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7077f28a79033eb32000208b0921b073331080009398483f357b3371729b78c7\": container with ID starting with 7077f28a79033eb32000208b0921b073331080009398483f357b3371729b78c7 not found: ID does not exist" containerID="7077f28a79033eb32000208b0921b073331080009398483f357b3371729b78c7" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.882787 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7077f28a79033eb32000208b0921b073331080009398483f357b3371729b78c7"} err="failed to get container status \"7077f28a79033eb32000208b0921b073331080009398483f357b3371729b78c7\": rpc error: code = NotFound desc = could not find container \"7077f28a79033eb32000208b0921b073331080009398483f357b3371729b78c7\": container with ID starting with 7077f28a79033eb32000208b0921b073331080009398483f357b3371729b78c7 not found: ID does not exist" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.934182 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/236db178-47ce-42a1-bc8e-e3c5fa287ab8-logs\") pod \"nova-api-0\" (UID: \"236db178-47ce-42a1-bc8e-e3c5fa287ab8\") " pod="openstack/nova-api-0" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.934279 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/236db178-47ce-42a1-bc8e-e3c5fa287ab8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"236db178-47ce-42a1-bc8e-e3c5fa287ab8\") " 
pod="openstack/nova-api-0" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.934362 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/236db178-47ce-42a1-bc8e-e3c5fa287ab8-config-data\") pod \"nova-api-0\" (UID: \"236db178-47ce-42a1-bc8e-e3c5fa287ab8\") " pod="openstack/nova-api-0" Oct 03 08:58:56 crc kubenswrapper[4899]: I1003 08:58:56.934401 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vq8p8\" (UniqueName: \"kubernetes.io/projected/236db178-47ce-42a1-bc8e-e3c5fa287ab8-kube-api-access-vq8p8\") pod \"nova-api-0\" (UID: \"236db178-47ce-42a1-bc8e-e3c5fa287ab8\") " pod="openstack/nova-api-0" Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.035771 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/236db178-47ce-42a1-bc8e-e3c5fa287ab8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"236db178-47ce-42a1-bc8e-e3c5fa287ab8\") " pod="openstack/nova-api-0" Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.035822 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/236db178-47ce-42a1-bc8e-e3c5fa287ab8-config-data\") pod \"nova-api-0\" (UID: \"236db178-47ce-42a1-bc8e-e3c5fa287ab8\") " pod="openstack/nova-api-0" Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.035865 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vq8p8\" (UniqueName: \"kubernetes.io/projected/236db178-47ce-42a1-bc8e-e3c5fa287ab8-kube-api-access-vq8p8\") pod \"nova-api-0\" (UID: \"236db178-47ce-42a1-bc8e-e3c5fa287ab8\") " pod="openstack/nova-api-0" Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.035954 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/236db178-47ce-42a1-bc8e-e3c5fa287ab8-logs\") pod \"nova-api-0\" (UID: \"236db178-47ce-42a1-bc8e-e3c5fa287ab8\") " pod="openstack/nova-api-0" Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.040739 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/236db178-47ce-42a1-bc8e-e3c5fa287ab8-logs\") pod \"nova-api-0\" (UID: \"236db178-47ce-42a1-bc8e-e3c5fa287ab8\") " pod="openstack/nova-api-0" Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.046124 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/236db178-47ce-42a1-bc8e-e3c5fa287ab8-config-data\") pod \"nova-api-0\" (UID: \"236db178-47ce-42a1-bc8e-e3c5fa287ab8\") " pod="openstack/nova-api-0" Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.051213 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/236db178-47ce-42a1-bc8e-e3c5fa287ab8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"236db178-47ce-42a1-bc8e-e3c5fa287ab8\") " pod="openstack/nova-api-0" Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.056143 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vq8p8\" (UniqueName: \"kubernetes.io/projected/236db178-47ce-42a1-bc8e-e3c5fa287ab8-kube-api-access-vq8p8\") pod \"nova-api-0\" (UID: \"236db178-47ce-42a1-bc8e-e3c5fa287ab8\") " pod="openstack/nova-api-0" Oct 03 
08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.156417 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.659332 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.666439 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:58:57 crc kubenswrapper[4899]: W1003 08:58:57.673195 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod236db178_47ce_42a1_bc8e_e3c5fa287ab8.slice/crio-39d8f683f2219659143453a21af3bf7847ea1f1428a08f03e09bd8daa2a8a86a WatchSource:0}: Error finding container 39d8f683f2219659143453a21af3bf7847ea1f1428a08f03e09bd8daa2a8a86a: Status 404 returned error can't find the container with id 39d8f683f2219659143453a21af3bf7847ea1f1428a08f03e09bd8daa2a8a86a Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.762679 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/833e2f10-2b54-4a61-a4ff-6e295668bca9-run-httpd\") pod \"833e2f10-2b54-4a61-a4ff-6e295668bca9\" (UID: \"833e2f10-2b54-4a61-a4ff-6e295668bca9\") " Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.762752 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/833e2f10-2b54-4a61-a4ff-6e295668bca9-sg-core-conf-yaml\") pod \"833e2f10-2b54-4a61-a4ff-6e295668bca9\" (UID: \"833e2f10-2b54-4a61-a4ff-6e295668bca9\") " Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.762816 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/833e2f10-2b54-4a61-a4ff-6e295668bca9-scripts\") pod \"833e2f10-2b54-4a61-a4ff-6e295668bca9\" (UID: \"833e2f10-2b54-4a61-a4ff-6e295668bca9\") " Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.762873 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/833e2f10-2b54-4a61-a4ff-6e295668bca9-log-httpd\") pod \"833e2f10-2b54-4a61-a4ff-6e295668bca9\" (UID: \"833e2f10-2b54-4a61-a4ff-6e295668bca9\") " Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.762992 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/833e2f10-2b54-4a61-a4ff-6e295668bca9-combined-ca-bundle\") pod \"833e2f10-2b54-4a61-a4ff-6e295668bca9\" (UID: \"833e2f10-2b54-4a61-a4ff-6e295668bca9\") " Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.763031 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/833e2f10-2b54-4a61-a4ff-6e295668bca9-config-data\") pod \"833e2f10-2b54-4a61-a4ff-6e295668bca9\" (UID: \"833e2f10-2b54-4a61-a4ff-6e295668bca9\") " Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.763058 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tcjzk\" (UniqueName: \"kubernetes.io/projected/833e2f10-2b54-4a61-a4ff-6e295668bca9-kube-api-access-tcjzk\") pod \"833e2f10-2b54-4a61-a4ff-6e295668bca9\" (UID: \"833e2f10-2b54-4a61-a4ff-6e295668bca9\") " Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.771417 4899 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/833e2f10-2b54-4a61-a4ff-6e295668bca9-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "833e2f10-2b54-4a61-a4ff-6e295668bca9" (UID: "833e2f10-2b54-4a61-a4ff-6e295668bca9"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.778443 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/833e2f10-2b54-4a61-a4ff-6e295668bca9-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "833e2f10-2b54-4a61-a4ff-6e295668bca9" (UID: "833e2f10-2b54-4a61-a4ff-6e295668bca9"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.778811 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/833e2f10-2b54-4a61-a4ff-6e295668bca9-scripts" (OuterVolumeSpecName: "scripts") pod "833e2f10-2b54-4a61-a4ff-6e295668bca9" (UID: "833e2f10-2b54-4a61-a4ff-6e295668bca9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.789110 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/833e2f10-2b54-4a61-a4ff-6e295668bca9-kube-api-access-tcjzk" (OuterVolumeSpecName: "kube-api-access-tcjzk") pod "833e2f10-2b54-4a61-a4ff-6e295668bca9" (UID: "833e2f10-2b54-4a61-a4ff-6e295668bca9"). InnerVolumeSpecName "kube-api-access-tcjzk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.865317 4899 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/833e2f10-2b54-4a61-a4ff-6e295668bca9-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.865358 4899 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/833e2f10-2b54-4a61-a4ff-6e295668bca9-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.865580 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tcjzk\" (UniqueName: \"kubernetes.io/projected/833e2f10-2b54-4a61-a4ff-6e295668bca9-kube-api-access-tcjzk\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.865594 4899 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/833e2f10-2b54-4a61-a4ff-6e295668bca9-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.877114 4899 generic.go:334] "Generic (PLEG): container finished" podID="833e2f10-2b54-4a61-a4ff-6e295668bca9" containerID="fb7c18b6e87c4efb15351aff3c6e54a4e8c1d9f3997d89ffe943ba314389da83" exitCode=0 Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.877196 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.877209 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"833e2f10-2b54-4a61-a4ff-6e295668bca9","Type":"ContainerDied","Data":"fb7c18b6e87c4efb15351aff3c6e54a4e8c1d9f3997d89ffe943ba314389da83"} Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.877234 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"833e2f10-2b54-4a61-a4ff-6e295668bca9","Type":"ContainerDied","Data":"d1bb7ba3b350b36e9a3623279cb7d5a1a44fc43b9cd3c3403e206e89058c8255"} Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.877250 4899 scope.go:117] "RemoveContainer" containerID="f5a8aaa3c7949b91ff3740ff91eb325a0e3d9c6ee500f3e2f81e939589ffa6c5" Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.910174 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"236db178-47ce-42a1-bc8e-e3c5fa287ab8","Type":"ContainerStarted","Data":"39d8f683f2219659143453a21af3bf7847ea1f1428a08f03e09bd8daa2a8a86a"} Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.910338 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/833e2f10-2b54-4a61-a4ff-6e295668bca9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "833e2f10-2b54-4a61-a4ff-6e295668bca9" (UID: "833e2f10-2b54-4a61-a4ff-6e295668bca9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.910417 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/833e2f10-2b54-4a61-a4ff-6e295668bca9-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "833e2f10-2b54-4a61-a4ff-6e295668bca9" (UID: "833e2f10-2b54-4a61-a4ff-6e295668bca9"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.925468 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b3ab645e-4533-4b5f-851f-5a8b09c6dda3","Type":"ContainerStarted","Data":"5c2c2bf6d0ed04aa16524f3855c7ab578c8cd5e352a691f3ee0d7ce71472e469"} Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.933818 4899 scope.go:117] "RemoveContainer" containerID="47c74d771242b8f8264859afbb6e6db38ef03dcaf80955908d72372df7f3d0d1" Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.953971 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.953955349 podStartE2EDuration="2.953955349s" podCreationTimestamp="2025-10-03 08:58:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:58:57.950879212 +0000 UTC m=+1112.058364165" watchObservedRunningTime="2025-10-03 08:58:57.953955349 +0000 UTC m=+1112.061440302" Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.967979 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/833e2f10-2b54-4a61-a4ff-6e295668bca9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.968012 4899 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/833e2f10-2b54-4a61-a4ff-6e295668bca9-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.976175 4899 scope.go:117] "RemoveContainer" containerID="fb7c18b6e87c4efb15351aff3c6e54a4e8c1d9f3997d89ffe943ba314389da83" Oct 03 08:58:57 crc kubenswrapper[4899]: I1003 08:58:57.997240 4899 scope.go:117] "RemoveContainer" containerID="6001d54af0e93a4e9dcd5815009d06519c4ea2e1817dc5bc5ffc057d4a40d01b" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.016787 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/833e2f10-2b54-4a61-a4ff-6e295668bca9-config-data" (OuterVolumeSpecName: "config-data") pod "833e2f10-2b54-4a61-a4ff-6e295668bca9" (UID: "833e2f10-2b54-4a61-a4ff-6e295668bca9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.028351 4899 scope.go:117] "RemoveContainer" containerID="f5a8aaa3c7949b91ff3740ff91eb325a0e3d9c6ee500f3e2f81e939589ffa6c5" Oct 03 08:58:58 crc kubenswrapper[4899]: E1003 08:58:58.028977 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5a8aaa3c7949b91ff3740ff91eb325a0e3d9c6ee500f3e2f81e939589ffa6c5\": container with ID starting with f5a8aaa3c7949b91ff3740ff91eb325a0e3d9c6ee500f3e2f81e939589ffa6c5 not found: ID does not exist" containerID="f5a8aaa3c7949b91ff3740ff91eb325a0e3d9c6ee500f3e2f81e939589ffa6c5" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.029022 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5a8aaa3c7949b91ff3740ff91eb325a0e3d9c6ee500f3e2f81e939589ffa6c5"} err="failed to get container status \"f5a8aaa3c7949b91ff3740ff91eb325a0e3d9c6ee500f3e2f81e939589ffa6c5\": rpc error: code = NotFound desc = could not find container \"f5a8aaa3c7949b91ff3740ff91eb325a0e3d9c6ee500f3e2f81e939589ffa6c5\": container with ID starting with f5a8aaa3c7949b91ff3740ff91eb325a0e3d9c6ee500f3e2f81e939589ffa6c5 not found: ID does not exist" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.029043 4899 scope.go:117] "RemoveContainer" containerID="47c74d771242b8f8264859afbb6e6db38ef03dcaf80955908d72372df7f3d0d1" Oct 03 08:58:58 crc kubenswrapper[4899]: E1003 08:58:58.029340 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47c74d771242b8f8264859afbb6e6db38ef03dcaf80955908d72372df7f3d0d1\": container with ID starting with 47c74d771242b8f8264859afbb6e6db38ef03dcaf80955908d72372df7f3d0d1 not found: ID does not exist" containerID="47c74d771242b8f8264859afbb6e6db38ef03dcaf80955908d72372df7f3d0d1" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.029394 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47c74d771242b8f8264859afbb6e6db38ef03dcaf80955908d72372df7f3d0d1"} err="failed to get container status \"47c74d771242b8f8264859afbb6e6db38ef03dcaf80955908d72372df7f3d0d1\": rpc error: code = NotFound desc = could not find container \"47c74d771242b8f8264859afbb6e6db38ef03dcaf80955908d72372df7f3d0d1\": container with ID starting with 47c74d771242b8f8264859afbb6e6db38ef03dcaf80955908d72372df7f3d0d1 not found: ID does not exist" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.029426 4899 scope.go:117] "RemoveContainer" containerID="fb7c18b6e87c4efb15351aff3c6e54a4e8c1d9f3997d89ffe943ba314389da83" Oct 03 08:58:58 crc kubenswrapper[4899]: E1003 08:58:58.030395 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb7c18b6e87c4efb15351aff3c6e54a4e8c1d9f3997d89ffe943ba314389da83\": container with ID starting with fb7c18b6e87c4efb15351aff3c6e54a4e8c1d9f3997d89ffe943ba314389da83 not found: ID does not exist" containerID="fb7c18b6e87c4efb15351aff3c6e54a4e8c1d9f3997d89ffe943ba314389da83" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.030424 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb7c18b6e87c4efb15351aff3c6e54a4e8c1d9f3997d89ffe943ba314389da83"} err="failed to get container status \"fb7c18b6e87c4efb15351aff3c6e54a4e8c1d9f3997d89ffe943ba314389da83\": rpc error: code = NotFound desc = could not 
find container \"fb7c18b6e87c4efb15351aff3c6e54a4e8c1d9f3997d89ffe943ba314389da83\": container with ID starting with fb7c18b6e87c4efb15351aff3c6e54a4e8c1d9f3997d89ffe943ba314389da83 not found: ID does not exist" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.030463 4899 scope.go:117] "RemoveContainer" containerID="6001d54af0e93a4e9dcd5815009d06519c4ea2e1817dc5bc5ffc057d4a40d01b" Oct 03 08:58:58 crc kubenswrapper[4899]: E1003 08:58:58.032350 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6001d54af0e93a4e9dcd5815009d06519c4ea2e1817dc5bc5ffc057d4a40d01b\": container with ID starting with 6001d54af0e93a4e9dcd5815009d06519c4ea2e1817dc5bc5ffc057d4a40d01b not found: ID does not exist" containerID="6001d54af0e93a4e9dcd5815009d06519c4ea2e1817dc5bc5ffc057d4a40d01b" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.032399 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6001d54af0e93a4e9dcd5815009d06519c4ea2e1817dc5bc5ffc057d4a40d01b"} err="failed to get container status \"6001d54af0e93a4e9dcd5815009d06519c4ea2e1817dc5bc5ffc057d4a40d01b\": rpc error: code = NotFound desc = could not find container \"6001d54af0e93a4e9dcd5815009d06519c4ea2e1817dc5bc5ffc057d4a40d01b\": container with ID starting with 6001d54af0e93a4e9dcd5815009d06519c4ea2e1817dc5bc5ffc057d4a40d01b not found: ID does not exist" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.069763 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/833e2f10-2b54-4a61-a4ff-6e295668bca9-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.228012 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.240386 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.250932 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:58:58 crc kubenswrapper[4899]: E1003 08:58:58.251372 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="833e2f10-2b54-4a61-a4ff-6e295668bca9" containerName="sg-core" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.251395 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="833e2f10-2b54-4a61-a4ff-6e295668bca9" containerName="sg-core" Oct 03 08:58:58 crc kubenswrapper[4899]: E1003 08:58:58.251415 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="833e2f10-2b54-4a61-a4ff-6e295668bca9" containerName="ceilometer-notification-agent" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.251422 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="833e2f10-2b54-4a61-a4ff-6e295668bca9" containerName="ceilometer-notification-agent" Oct 03 08:58:58 crc kubenswrapper[4899]: E1003 08:58:58.251439 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="833e2f10-2b54-4a61-a4ff-6e295668bca9" containerName="proxy-httpd" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.251446 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="833e2f10-2b54-4a61-a4ff-6e295668bca9" containerName="proxy-httpd" Oct 03 08:58:58 crc kubenswrapper[4899]: E1003 08:58:58.251471 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="833e2f10-2b54-4a61-a4ff-6e295668bca9" 
containerName="ceilometer-central-agent" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.251476 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="833e2f10-2b54-4a61-a4ff-6e295668bca9" containerName="ceilometer-central-agent" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.251659 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="833e2f10-2b54-4a61-a4ff-6e295668bca9" containerName="proxy-httpd" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.251679 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="833e2f10-2b54-4a61-a4ff-6e295668bca9" containerName="sg-core" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.251704 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="833e2f10-2b54-4a61-a4ff-6e295668bca9" containerName="ceilometer-central-agent" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.251716 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="833e2f10-2b54-4a61-a4ff-6e295668bca9" containerName="ceilometer-notification-agent" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.265648 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.265764 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.269236 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.269498 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.278438 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.380180 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " pod="openstack/ceilometer-0" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.380218 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0057a378-f37c-473f-a1cb-9c9f1059a3c3-log-httpd\") pod \"ceilometer-0\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " pod="openstack/ceilometer-0" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.380249 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-scripts\") pod \"ceilometer-0\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " pod="openstack/ceilometer-0" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.380332 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " pod="openstack/ceilometer-0" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.380372 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-config-data\") pod \"ceilometer-0\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " pod="openstack/ceilometer-0" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.380420 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " pod="openstack/ceilometer-0" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.380465 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0057a378-f37c-473f-a1cb-9c9f1059a3c3-run-httpd\") pod \"ceilometer-0\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " pod="openstack/ceilometer-0" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.380501 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmklh\" (UniqueName: \"kubernetes.io/projected/0057a378-f37c-473f-a1cb-9c9f1059a3c3-kube-api-access-rmklh\") pod \"ceilometer-0\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " pod="openstack/ceilometer-0" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.481761 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " pod="openstack/ceilometer-0" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.481823 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0057a378-f37c-473f-a1cb-9c9f1059a3c3-run-httpd\") pod \"ceilometer-0\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " pod="openstack/ceilometer-0" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.481875 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmklh\" (UniqueName: \"kubernetes.io/projected/0057a378-f37c-473f-a1cb-9c9f1059a3c3-kube-api-access-rmklh\") pod \"ceilometer-0\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " pod="openstack/ceilometer-0" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.481918 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " pod="openstack/ceilometer-0" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.481934 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0057a378-f37c-473f-a1cb-9c9f1059a3c3-log-httpd\") pod \"ceilometer-0\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " pod="openstack/ceilometer-0" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.481957 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-scripts\") pod \"ceilometer-0\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " pod="openstack/ceilometer-0" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.482011 4899 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " pod="openstack/ceilometer-0" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.482094 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-config-data\") pod \"ceilometer-0\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " pod="openstack/ceilometer-0" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.482808 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0057a378-f37c-473f-a1cb-9c9f1059a3c3-run-httpd\") pod \"ceilometer-0\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " pod="openstack/ceilometer-0" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.483057 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0057a378-f37c-473f-a1cb-9c9f1059a3c3-log-httpd\") pod \"ceilometer-0\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " pod="openstack/ceilometer-0" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.487356 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " pod="openstack/ceilometer-0" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.488631 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-scripts\") pod \"ceilometer-0\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " pod="openstack/ceilometer-0" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.488964 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-config-data\") pod \"ceilometer-0\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " pod="openstack/ceilometer-0" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.493273 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " pod="openstack/ceilometer-0" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.497007 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " pod="openstack/ceilometer-0" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.507602 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmklh\" (UniqueName: \"kubernetes.io/projected/0057a378-f37c-473f-a1cb-9c9f1059a3c3-kube-api-access-rmklh\") pod \"ceilometer-0\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " pod="openstack/ceilometer-0" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.540200 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="5e6940c0-2f3d-4053-931b-11d67921c897" path="/var/lib/kubelet/pods/5e6940c0-2f3d-4053-931b-11d67921c897/volumes" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.541299 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="833e2f10-2b54-4a61-a4ff-6e295668bca9" path="/var/lib/kubelet/pods/833e2f10-2b54-4a61-a4ff-6e295668bca9/volumes" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.605068 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.937541 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"236db178-47ce-42a1-bc8e-e3c5fa287ab8","Type":"ContainerStarted","Data":"059dd890e6c062f7e0068bbf931f49126e60649c83ccb2ceda81fc970d951fdd"} Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.937583 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"236db178-47ce-42a1-bc8e-e3c5fa287ab8","Type":"ContainerStarted","Data":"d3d661b8d0f83e7c0dd10009cf6101421466b5fd616408bacccc39d4796348c2"} Oct 03 08:58:58 crc kubenswrapper[4899]: I1003 08:58:58.961554 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.961474274 podStartE2EDuration="2.961474274s" podCreationTimestamp="2025-10-03 08:58:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:58:58.953192912 +0000 UTC m=+1113.060677865" watchObservedRunningTime="2025-10-03 08:58:58.961474274 +0000 UTC m=+1113.068959227" Oct 03 08:58:59 crc kubenswrapper[4899]: I1003 08:58:59.040634 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:58:59 crc kubenswrapper[4899]: W1003 08:58:59.055012 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0057a378_f37c_473f_a1cb_9c9f1059a3c3.slice/crio-46b2bfa638934d4c5d26d37e58a4804cc92dcbee08dee27c67fe55c07e47bca8 WatchSource:0}: Error finding container 46b2bfa638934d4c5d26d37e58a4804cc92dcbee08dee27c67fe55c07e47bca8: Status 404 returned error can't find the container with id 46b2bfa638934d4c5d26d37e58a4804cc92dcbee08dee27c67fe55c07e47bca8 Oct 03 08:58:59 crc kubenswrapper[4899]: I1003 08:58:59.947739 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0057a378-f37c-473f-a1cb-9c9f1059a3c3","Type":"ContainerStarted","Data":"46b2bfa638934d4c5d26d37e58a4804cc92dcbee08dee27c67fe55c07e47bca8"} Oct 03 08:59:00 crc kubenswrapper[4899]: I1003 08:59:00.956932 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0057a378-f37c-473f-a1cb-9c9f1059a3c3","Type":"ContainerStarted","Data":"87f663b61409e48f0ceb5afd5c9a91df64e4746249cc9e243d55da03a5b1214b"} Oct 03 08:59:00 crc kubenswrapper[4899]: I1003 08:59:00.956975 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0057a378-f37c-473f-a1cb-9c9f1059a3c3","Type":"ContainerStarted","Data":"127bf3afbd7a7644c3abd4883d90458d455aee9bd0a44272aa18f11f410f6412"} Oct 03 08:59:01 crc kubenswrapper[4899]: I1003 08:59:01.125729 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 03 08:59:01 crc kubenswrapper[4899]: I1003 08:59:01.967202 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"0057a378-f37c-473f-a1cb-9c9f1059a3c3","Type":"ContainerStarted","Data":"4e0b99d1ec94b72db81ba1f1a856af0848f4bb266bea6d2d2976fd9f215d8f74"} Oct 03 08:59:02 crc kubenswrapper[4899]: I1003 08:59:02.977174 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0057a378-f37c-473f-a1cb-9c9f1059a3c3","Type":"ContainerStarted","Data":"c5dde3e089f7348e258d69cc42cf942c0365b85c2dd7ad5228cf38c2edffb068"} Oct 03 08:59:02 crc kubenswrapper[4899]: I1003 08:59:02.977619 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 03 08:59:03 crc kubenswrapper[4899]: I1003 08:59:03.002384 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.4419682 podStartE2EDuration="5.002363549s" podCreationTimestamp="2025-10-03 08:58:58 +0000 UTC" firstStartedPulling="2025-10-03 08:58:59.057878927 +0000 UTC m=+1113.165363880" lastFinishedPulling="2025-10-03 08:59:02.618274276 +0000 UTC m=+1116.725759229" observedRunningTime="2025-10-03 08:59:02.998051472 +0000 UTC m=+1117.105536425" watchObservedRunningTime="2025-10-03 08:59:03.002363549 +0000 UTC m=+1117.109848522" Oct 03 08:59:04 crc kubenswrapper[4899]: I1003 08:59:04.111209 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 03 08:59:06 crc kubenswrapper[4899]: I1003 08:59:06.127220 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 03 08:59:06 crc kubenswrapper[4899]: I1003 08:59:06.183169 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 03 08:59:07 crc kubenswrapper[4899]: I1003 08:59:07.045283 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 03 08:59:07 crc kubenswrapper[4899]: I1003 08:59:07.157525 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 08:59:07 crc kubenswrapper[4899]: I1003 08:59:07.157630 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 08:59:08 crc kubenswrapper[4899]: I1003 08:59:08.241083 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="236db178-47ce-42a1-bc8e-e3c5fa287ab8" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.194:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 08:59:08 crc kubenswrapper[4899]: I1003 08:59:08.241088 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="236db178-47ce-42a1-bc8e-e3c5fa287ab8" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.194:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 08:59:12 crc kubenswrapper[4899]: I1003 08:59:12.198097 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 08:59:12 crc kubenswrapper[4899]: I1003 08:59:12.198157 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" 
podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.031963 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.038392 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.065562 4899 generic.go:334] "Generic (PLEG): container finished" podID="04d02dab-0232-4bd2-98a2-daae43c06f84" containerID="d3af43aa8d78309b8fc524c976cdbe7e11e59a24735adac2686c11f2b88f1d6d" exitCode=137 Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.065636 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"04d02dab-0232-4bd2-98a2-daae43c06f84","Type":"ContainerDied","Data":"d3af43aa8d78309b8fc524c976cdbe7e11e59a24735adac2686c11f2b88f1d6d"} Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.065665 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"04d02dab-0232-4bd2-98a2-daae43c06f84","Type":"ContainerDied","Data":"6e36e25b21319c14fe4e8f596666616b40f30adccbf7c05d10906056d8238137"} Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.065684 4899 scope.go:117] "RemoveContainer" containerID="d3af43aa8d78309b8fc524c976cdbe7e11e59a24735adac2686c11f2b88f1d6d" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.065802 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.069452 4899 generic.go:334] "Generic (PLEG): container finished" podID="6b6f9dac-6727-43a1-95ae-0682aacc9bfb" containerID="3768fe238946cc8bdd5ccab541b6087cf423d175d0ee57abfc4bb8e0736e2538" exitCode=137 Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.069499 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6b6f9dac-6727-43a1-95ae-0682aacc9bfb","Type":"ContainerDied","Data":"3768fe238946cc8bdd5ccab541b6087cf423d175d0ee57abfc4bb8e0736e2538"} Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.069524 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6b6f9dac-6727-43a1-95ae-0682aacc9bfb","Type":"ContainerDied","Data":"5d43be55d9e6dd8dade4df7d92d612d969c0d29e74fee33da2ba17f56ef1cec9"} Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.069577 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.095398 4899 scope.go:117] "RemoveContainer" containerID="d3af43aa8d78309b8fc524c976cdbe7e11e59a24735adac2686c11f2b88f1d6d" Oct 03 08:59:14 crc kubenswrapper[4899]: E1003 08:59:14.096090 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3af43aa8d78309b8fc524c976cdbe7e11e59a24735adac2686c11f2b88f1d6d\": container with ID starting with d3af43aa8d78309b8fc524c976cdbe7e11e59a24735adac2686c11f2b88f1d6d not found: ID does not exist" containerID="d3af43aa8d78309b8fc524c976cdbe7e11e59a24735adac2686c11f2b88f1d6d" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.096139 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3af43aa8d78309b8fc524c976cdbe7e11e59a24735adac2686c11f2b88f1d6d"} err="failed to get container status \"d3af43aa8d78309b8fc524c976cdbe7e11e59a24735adac2686c11f2b88f1d6d\": rpc error: code = NotFound desc = could not find container \"d3af43aa8d78309b8fc524c976cdbe7e11e59a24735adac2686c11f2b88f1d6d\": container with ID starting with d3af43aa8d78309b8fc524c976cdbe7e11e59a24735adac2686c11f2b88f1d6d not found: ID does not exist" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.096164 4899 scope.go:117] "RemoveContainer" containerID="3768fe238946cc8bdd5ccab541b6087cf423d175d0ee57abfc4bb8e0736e2538" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.109502 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04d02dab-0232-4bd2-98a2-daae43c06f84-config-data\") pod \"04d02dab-0232-4bd2-98a2-daae43c06f84\" (UID: \"04d02dab-0232-4bd2-98a2-daae43c06f84\") " Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.109573 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b6f9dac-6727-43a1-95ae-0682aacc9bfb-combined-ca-bundle\") pod \"6b6f9dac-6727-43a1-95ae-0682aacc9bfb\" (UID: \"6b6f9dac-6727-43a1-95ae-0682aacc9bfb\") " Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.109612 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04d02dab-0232-4bd2-98a2-daae43c06f84-combined-ca-bundle\") pod \"04d02dab-0232-4bd2-98a2-daae43c06f84\" (UID: \"04d02dab-0232-4bd2-98a2-daae43c06f84\") " Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.109652 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thqrc\" (UniqueName: \"kubernetes.io/projected/04d02dab-0232-4bd2-98a2-daae43c06f84-kube-api-access-thqrc\") pod \"04d02dab-0232-4bd2-98a2-daae43c06f84\" (UID: \"04d02dab-0232-4bd2-98a2-daae43c06f84\") " Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.109699 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b6f9dac-6727-43a1-95ae-0682aacc9bfb-logs\") pod \"6b6f9dac-6727-43a1-95ae-0682aacc9bfb\" (UID: \"6b6f9dac-6727-43a1-95ae-0682aacc9bfb\") " Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.109750 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b6f9dac-6727-43a1-95ae-0682aacc9bfb-config-data\") pod \"6b6f9dac-6727-43a1-95ae-0682aacc9bfb\" (UID: 
\"6b6f9dac-6727-43a1-95ae-0682aacc9bfb\") " Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.109837 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d8w2b\" (UniqueName: \"kubernetes.io/projected/6b6f9dac-6727-43a1-95ae-0682aacc9bfb-kube-api-access-d8w2b\") pod \"6b6f9dac-6727-43a1-95ae-0682aacc9bfb\" (UID: \"6b6f9dac-6727-43a1-95ae-0682aacc9bfb\") " Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.111095 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b6f9dac-6727-43a1-95ae-0682aacc9bfb-logs" (OuterVolumeSpecName: "logs") pod "6b6f9dac-6727-43a1-95ae-0682aacc9bfb" (UID: "6b6f9dac-6727-43a1-95ae-0682aacc9bfb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.111536 4899 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b6f9dac-6727-43a1-95ae-0682aacc9bfb-logs\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.115671 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b6f9dac-6727-43a1-95ae-0682aacc9bfb-kube-api-access-d8w2b" (OuterVolumeSpecName: "kube-api-access-d8w2b") pod "6b6f9dac-6727-43a1-95ae-0682aacc9bfb" (UID: "6b6f9dac-6727-43a1-95ae-0682aacc9bfb"). InnerVolumeSpecName "kube-api-access-d8w2b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.122349 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04d02dab-0232-4bd2-98a2-daae43c06f84-kube-api-access-thqrc" (OuterVolumeSpecName: "kube-api-access-thqrc") pod "04d02dab-0232-4bd2-98a2-daae43c06f84" (UID: "04d02dab-0232-4bd2-98a2-daae43c06f84"). InnerVolumeSpecName "kube-api-access-thqrc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.124636 4899 scope.go:117] "RemoveContainer" containerID="1d73f93d3a8594b2e03b003ff0d89a56f05c90c15682e36e53f82f34787fe47f" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.143953 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b6f9dac-6727-43a1-95ae-0682aacc9bfb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6b6f9dac-6727-43a1-95ae-0682aacc9bfb" (UID: "6b6f9dac-6727-43a1-95ae-0682aacc9bfb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.145455 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04d02dab-0232-4bd2-98a2-daae43c06f84-config-data" (OuterVolumeSpecName: "config-data") pod "04d02dab-0232-4bd2-98a2-daae43c06f84" (UID: "04d02dab-0232-4bd2-98a2-daae43c06f84"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.146396 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b6f9dac-6727-43a1-95ae-0682aacc9bfb-config-data" (OuterVolumeSpecName: "config-data") pod "6b6f9dac-6727-43a1-95ae-0682aacc9bfb" (UID: "6b6f9dac-6727-43a1-95ae-0682aacc9bfb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.149771 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04d02dab-0232-4bd2-98a2-daae43c06f84-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "04d02dab-0232-4bd2-98a2-daae43c06f84" (UID: "04d02dab-0232-4bd2-98a2-daae43c06f84"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.169047 4899 scope.go:117] "RemoveContainer" containerID="3768fe238946cc8bdd5ccab541b6087cf423d175d0ee57abfc4bb8e0736e2538" Oct 03 08:59:14 crc kubenswrapper[4899]: E1003 08:59:14.169520 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3768fe238946cc8bdd5ccab541b6087cf423d175d0ee57abfc4bb8e0736e2538\": container with ID starting with 3768fe238946cc8bdd5ccab541b6087cf423d175d0ee57abfc4bb8e0736e2538 not found: ID does not exist" containerID="3768fe238946cc8bdd5ccab541b6087cf423d175d0ee57abfc4bb8e0736e2538" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.169575 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3768fe238946cc8bdd5ccab541b6087cf423d175d0ee57abfc4bb8e0736e2538"} err="failed to get container status \"3768fe238946cc8bdd5ccab541b6087cf423d175d0ee57abfc4bb8e0736e2538\": rpc error: code = NotFound desc = could not find container \"3768fe238946cc8bdd5ccab541b6087cf423d175d0ee57abfc4bb8e0736e2538\": container with ID starting with 3768fe238946cc8bdd5ccab541b6087cf423d175d0ee57abfc4bb8e0736e2538 not found: ID does not exist" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.169607 4899 scope.go:117] "RemoveContainer" containerID="1d73f93d3a8594b2e03b003ff0d89a56f05c90c15682e36e53f82f34787fe47f" Oct 03 08:59:14 crc kubenswrapper[4899]: E1003 08:59:14.170110 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d73f93d3a8594b2e03b003ff0d89a56f05c90c15682e36e53f82f34787fe47f\": container with ID starting with 1d73f93d3a8594b2e03b003ff0d89a56f05c90c15682e36e53f82f34787fe47f not found: ID does not exist" containerID="1d73f93d3a8594b2e03b003ff0d89a56f05c90c15682e36e53f82f34787fe47f" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.170148 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d73f93d3a8594b2e03b003ff0d89a56f05c90c15682e36e53f82f34787fe47f"} err="failed to get container status \"1d73f93d3a8594b2e03b003ff0d89a56f05c90c15682e36e53f82f34787fe47f\": rpc error: code = NotFound desc = could not find container \"1d73f93d3a8594b2e03b003ff0d89a56f05c90c15682e36e53f82f34787fe47f\": container with ID starting with 1d73f93d3a8594b2e03b003ff0d89a56f05c90c15682e36e53f82f34787fe47f not found: ID does not exist" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.213025 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b6f9dac-6727-43a1-95ae-0682aacc9bfb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.213057 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04d02dab-0232-4bd2-98a2-daae43c06f84-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:14 crc 
kubenswrapper[4899]: I1003 08:59:14.213072 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thqrc\" (UniqueName: \"kubernetes.io/projected/04d02dab-0232-4bd2-98a2-daae43c06f84-kube-api-access-thqrc\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.213083 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b6f9dac-6727-43a1-95ae-0682aacc9bfb-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.213095 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d8w2b\" (UniqueName: \"kubernetes.io/projected/6b6f9dac-6727-43a1-95ae-0682aacc9bfb-kube-api-access-d8w2b\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.213107 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04d02dab-0232-4bd2-98a2-daae43c06f84-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.412263 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.424383 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.437169 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.450292 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.461466 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 03 08:59:14 crc kubenswrapper[4899]: E1003 08:59:14.461956 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04d02dab-0232-4bd2-98a2-daae43c06f84" containerName="nova-cell1-novncproxy-novncproxy" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.462001 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="04d02dab-0232-4bd2-98a2-daae43c06f84" containerName="nova-cell1-novncproxy-novncproxy" Oct 03 08:59:14 crc kubenswrapper[4899]: E1003 08:59:14.462025 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b6f9dac-6727-43a1-95ae-0682aacc9bfb" containerName="nova-metadata-metadata" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.462032 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b6f9dac-6727-43a1-95ae-0682aacc9bfb" containerName="nova-metadata-metadata" Oct 03 08:59:14 crc kubenswrapper[4899]: E1003 08:59:14.462049 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b6f9dac-6727-43a1-95ae-0682aacc9bfb" containerName="nova-metadata-log" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.462055 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b6f9dac-6727-43a1-95ae-0682aacc9bfb" containerName="nova-metadata-log" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.462244 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b6f9dac-6727-43a1-95ae-0682aacc9bfb" containerName="nova-metadata-log" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.462262 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b6f9dac-6727-43a1-95ae-0682aacc9bfb" containerName="nova-metadata-metadata" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.462281 
4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="04d02dab-0232-4bd2-98a2-daae43c06f84" containerName="nova-cell1-novncproxy-novncproxy" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.463270 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.470571 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.470863 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.479971 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.481249 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.483826 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.484491 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.486248 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.503631 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.529462 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.534557 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6\") " pod="openstack/nova-metadata-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.534991 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-logs\") pod \"nova-metadata-0\" (UID: \"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6\") " pod="openstack/nova-metadata-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.535043 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ww4ww\" (UniqueName: \"kubernetes.io/projected/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-kube-api-access-ww4ww\") pod \"nova-metadata-0\" (UID: \"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6\") " pod="openstack/nova-metadata-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.535072 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6\") " pod="openstack/nova-metadata-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.535127 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-config-data\") pod \"nova-metadata-0\" (UID: \"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6\") " pod="openstack/nova-metadata-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.562102 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04d02dab-0232-4bd2-98a2-daae43c06f84" path="/var/lib/kubelet/pods/04d02dab-0232-4bd2-98a2-daae43c06f84/volumes" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.563210 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b6f9dac-6727-43a1-95ae-0682aacc9bfb" path="/var/lib/kubelet/pods/6b6f9dac-6727-43a1-95ae-0682aacc9bfb/volumes" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.637259 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bdcd25b-9c6e-40d6-82d4-6af348b37c1a-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bdcd25b-9c6e-40d6-82d4-6af348b37c1a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.637320 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bdcd25b-9c6e-40d6-82d4-6af348b37c1a-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bdcd25b-9c6e-40d6-82d4-6af348b37c1a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.637347 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6\") " pod="openstack/nova-metadata-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.638837 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0bdcd25b-9c6e-40d6-82d4-6af348b37c1a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bdcd25b-9c6e-40d6-82d4-6af348b37c1a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.638956 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-logs\") pod \"nova-metadata-0\" (UID: \"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6\") " pod="openstack/nova-metadata-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.638989 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ww4ww\" (UniqueName: \"kubernetes.io/projected/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-kube-api-access-ww4ww\") pod \"nova-metadata-0\" (UID: \"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6\") " pod="openstack/nova-metadata-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.639018 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6\") " pod="openstack/nova-metadata-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.639063 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bdcd25b-9c6e-40d6-82d4-6af348b37c1a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bdcd25b-9c6e-40d6-82d4-6af348b37c1a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.639121 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-config-data\") pod \"nova-metadata-0\" (UID: \"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6\") " pod="openstack/nova-metadata-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.639180 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sz7nr\" (UniqueName: \"kubernetes.io/projected/0bdcd25b-9c6e-40d6-82d4-6af348b37c1a-kube-api-access-sz7nr\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bdcd25b-9c6e-40d6-82d4-6af348b37c1a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.639441 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-logs\") pod \"nova-metadata-0\" (UID: \"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6\") " pod="openstack/nova-metadata-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.641903 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6\") " pod="openstack/nova-metadata-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.642595 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-config-data\") pod \"nova-metadata-0\" (UID: \"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6\") " pod="openstack/nova-metadata-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.654407 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6\") " pod="openstack/nova-metadata-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.658842 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ww4ww\" (UniqueName: \"kubernetes.io/projected/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-kube-api-access-ww4ww\") pod \"nova-metadata-0\" (UID: \"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6\") " pod="openstack/nova-metadata-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.740622 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0bdcd25b-9c6e-40d6-82d4-6af348b37c1a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bdcd25b-9c6e-40d6-82d4-6af348b37c1a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.740690 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bdcd25b-9c6e-40d6-82d4-6af348b37c1a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bdcd25b-9c6e-40d6-82d4-6af348b37c1a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 
08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.740725 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sz7nr\" (UniqueName: \"kubernetes.io/projected/0bdcd25b-9c6e-40d6-82d4-6af348b37c1a-kube-api-access-sz7nr\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bdcd25b-9c6e-40d6-82d4-6af348b37c1a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.740756 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bdcd25b-9c6e-40d6-82d4-6af348b37c1a-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bdcd25b-9c6e-40d6-82d4-6af348b37c1a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.740782 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bdcd25b-9c6e-40d6-82d4-6af348b37c1a-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bdcd25b-9c6e-40d6-82d4-6af348b37c1a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.743673 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0bdcd25b-9c6e-40d6-82d4-6af348b37c1a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bdcd25b-9c6e-40d6-82d4-6af348b37c1a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.743737 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bdcd25b-9c6e-40d6-82d4-6af348b37c1a-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bdcd25b-9c6e-40d6-82d4-6af348b37c1a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.744625 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/0bdcd25b-9c6e-40d6-82d4-6af348b37c1a-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bdcd25b-9c6e-40d6-82d4-6af348b37c1a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.745368 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bdcd25b-9c6e-40d6-82d4-6af348b37c1a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bdcd25b-9c6e-40d6-82d4-6af348b37c1a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.757371 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sz7nr\" (UniqueName: \"kubernetes.io/projected/0bdcd25b-9c6e-40d6-82d4-6af348b37c1a-kube-api-access-sz7nr\") pod \"nova-cell1-novncproxy-0\" (UID: \"0bdcd25b-9c6e-40d6-82d4-6af348b37c1a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.804831 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 08:59:14 crc kubenswrapper[4899]: I1003 08:59:14.857241 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:59:15 crc kubenswrapper[4899]: I1003 08:59:15.210093 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 08:59:15 crc kubenswrapper[4899]: W1003 08:59:15.212907 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6dc10dad_9e14_4f05_9519_d7c38d5a4ca6.slice/crio-f6f21c786350117bda6e83985f1e381c1a22fc34e13d8770e2492215c80945a7 WatchSource:0}: Error finding container f6f21c786350117bda6e83985f1e381c1a22fc34e13d8770e2492215c80945a7: Status 404 returned error can't find the container with id f6f21c786350117bda6e83985f1e381c1a22fc34e13d8770e2492215c80945a7 Oct 03 08:59:15 crc kubenswrapper[4899]: I1003 08:59:15.286189 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 08:59:16 crc kubenswrapper[4899]: I1003 08:59:16.093161 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6","Type":"ContainerStarted","Data":"0e203adc17a136b60fad2665a004dd0cb3c96d39e92b3d66228f7e4b1ce810e9"} Oct 03 08:59:16 crc kubenswrapper[4899]: I1003 08:59:16.093456 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6","Type":"ContainerStarted","Data":"260295fae2fc28336557bc3d1e63ffc8c8ef713ea00b862a8b4e2d4d57d351f1"} Oct 03 08:59:16 crc kubenswrapper[4899]: I1003 08:59:16.093472 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6","Type":"ContainerStarted","Data":"f6f21c786350117bda6e83985f1e381c1a22fc34e13d8770e2492215c80945a7"} Oct 03 08:59:16 crc kubenswrapper[4899]: I1003 08:59:16.095342 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"0bdcd25b-9c6e-40d6-82d4-6af348b37c1a","Type":"ContainerStarted","Data":"9e430f17cdc527c1afc75b0592233ebd6dd6e1831804fe20ca412629c6d32ca8"} Oct 03 08:59:16 crc kubenswrapper[4899]: I1003 08:59:16.095392 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"0bdcd25b-9c6e-40d6-82d4-6af348b37c1a","Type":"ContainerStarted","Data":"228ee7475e1f06a1d57399b6a25dc1a8c5abc90b17ba5bea0b7fd39cd415de44"} Oct 03 08:59:16 crc kubenswrapper[4899]: I1003 08:59:16.116454 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.116419978 podStartE2EDuration="2.116419978s" podCreationTimestamp="2025-10-03 08:59:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:59:16.111178713 +0000 UTC m=+1130.218663666" watchObservedRunningTime="2025-10-03 08:59:16.116419978 +0000 UTC m=+1130.223904931" Oct 03 08:59:16 crc kubenswrapper[4899]: I1003 08:59:16.135091 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.135070519 podStartE2EDuration="2.135070519s" podCreationTimestamp="2025-10-03 08:59:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:59:16.127307164 +0000 UTC m=+1130.234792127" watchObservedRunningTime="2025-10-03 08:59:16.135070519 +0000 UTC 
m=+1130.242555482" Oct 03 08:59:17 crc kubenswrapper[4899]: I1003 08:59:17.161107 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 03 08:59:17 crc kubenswrapper[4899]: I1003 08:59:17.161190 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 03 08:59:17 crc kubenswrapper[4899]: I1003 08:59:17.161730 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 03 08:59:17 crc kubenswrapper[4899]: I1003 08:59:17.161750 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 03 08:59:17 crc kubenswrapper[4899]: I1003 08:59:17.164605 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 03 08:59:17 crc kubenswrapper[4899]: I1003 08:59:17.164650 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 03 08:59:17 crc kubenswrapper[4899]: I1003 08:59:17.336709 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-4v2wj"] Oct 03 08:59:17 crc kubenswrapper[4899]: I1003 08:59:17.342172 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" Oct 03 08:59:17 crc kubenswrapper[4899]: I1003 08:59:17.349140 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-4v2wj"] Oct 03 08:59:17 crc kubenswrapper[4899]: I1003 08:59:17.493417 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-ovsdbserver-nb\") pod \"dnsmasq-dns-cd5cbd7b9-4v2wj\" (UID: \"b8f5f08a-6776-49c8-8638-4225b9f222ab\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" Oct 03 08:59:17 crc kubenswrapper[4899]: I1003 08:59:17.493482 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-config\") pod \"dnsmasq-dns-cd5cbd7b9-4v2wj\" (UID: \"b8f5f08a-6776-49c8-8638-4225b9f222ab\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" Oct 03 08:59:17 crc kubenswrapper[4899]: I1003 08:59:17.493524 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-dns-swift-storage-0\") pod \"dnsmasq-dns-cd5cbd7b9-4v2wj\" (UID: \"b8f5f08a-6776-49c8-8638-4225b9f222ab\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" Oct 03 08:59:17 crc kubenswrapper[4899]: I1003 08:59:17.493579 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tn54\" (UniqueName: \"kubernetes.io/projected/b8f5f08a-6776-49c8-8638-4225b9f222ab-kube-api-access-4tn54\") pod \"dnsmasq-dns-cd5cbd7b9-4v2wj\" (UID: \"b8f5f08a-6776-49c8-8638-4225b9f222ab\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" Oct 03 08:59:17 crc kubenswrapper[4899]: I1003 08:59:17.493755 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-ovsdbserver-sb\") pod \"dnsmasq-dns-cd5cbd7b9-4v2wj\" (UID: \"b8f5f08a-6776-49c8-8638-4225b9f222ab\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" Oct 03 08:59:17 
crc kubenswrapper[4899]: I1003 08:59:17.493828 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-dns-svc\") pod \"dnsmasq-dns-cd5cbd7b9-4v2wj\" (UID: \"b8f5f08a-6776-49c8-8638-4225b9f222ab\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" Oct 03 08:59:17 crc kubenswrapper[4899]: I1003 08:59:17.595086 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-ovsdbserver-nb\") pod \"dnsmasq-dns-cd5cbd7b9-4v2wj\" (UID: \"b8f5f08a-6776-49c8-8638-4225b9f222ab\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" Oct 03 08:59:17 crc kubenswrapper[4899]: I1003 08:59:17.595144 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-config\") pod \"dnsmasq-dns-cd5cbd7b9-4v2wj\" (UID: \"b8f5f08a-6776-49c8-8638-4225b9f222ab\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" Oct 03 08:59:17 crc kubenswrapper[4899]: I1003 08:59:17.595172 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-dns-swift-storage-0\") pod \"dnsmasq-dns-cd5cbd7b9-4v2wj\" (UID: \"b8f5f08a-6776-49c8-8638-4225b9f222ab\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" Oct 03 08:59:17 crc kubenswrapper[4899]: I1003 08:59:17.595221 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tn54\" (UniqueName: \"kubernetes.io/projected/b8f5f08a-6776-49c8-8638-4225b9f222ab-kube-api-access-4tn54\") pod \"dnsmasq-dns-cd5cbd7b9-4v2wj\" (UID: \"b8f5f08a-6776-49c8-8638-4225b9f222ab\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" Oct 03 08:59:17 crc kubenswrapper[4899]: I1003 08:59:17.595308 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-ovsdbserver-sb\") pod \"dnsmasq-dns-cd5cbd7b9-4v2wj\" (UID: \"b8f5f08a-6776-49c8-8638-4225b9f222ab\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" Oct 03 08:59:17 crc kubenswrapper[4899]: I1003 08:59:17.595336 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-dns-svc\") pod \"dnsmasq-dns-cd5cbd7b9-4v2wj\" (UID: \"b8f5f08a-6776-49c8-8638-4225b9f222ab\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" Oct 03 08:59:17 crc kubenswrapper[4899]: I1003 08:59:17.596493 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-ovsdbserver-nb\") pod \"dnsmasq-dns-cd5cbd7b9-4v2wj\" (UID: \"b8f5f08a-6776-49c8-8638-4225b9f222ab\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" Oct 03 08:59:17 crc kubenswrapper[4899]: I1003 08:59:17.597022 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-dns-swift-storage-0\") pod \"dnsmasq-dns-cd5cbd7b9-4v2wj\" (UID: \"b8f5f08a-6776-49c8-8638-4225b9f222ab\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" Oct 03 08:59:17 crc kubenswrapper[4899]: I1003 08:59:17.597180 4899 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-dns-svc\") pod \"dnsmasq-dns-cd5cbd7b9-4v2wj\" (UID: \"b8f5f08a-6776-49c8-8638-4225b9f222ab\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" Oct 03 08:59:17 crc kubenswrapper[4899]: I1003 08:59:17.597239 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-config\") pod \"dnsmasq-dns-cd5cbd7b9-4v2wj\" (UID: \"b8f5f08a-6776-49c8-8638-4225b9f222ab\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" Oct 03 08:59:17 crc kubenswrapper[4899]: I1003 08:59:17.597564 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-ovsdbserver-sb\") pod \"dnsmasq-dns-cd5cbd7b9-4v2wj\" (UID: \"b8f5f08a-6776-49c8-8638-4225b9f222ab\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" Oct 03 08:59:17 crc kubenswrapper[4899]: I1003 08:59:17.614171 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tn54\" (UniqueName: \"kubernetes.io/projected/b8f5f08a-6776-49c8-8638-4225b9f222ab-kube-api-access-4tn54\") pod \"dnsmasq-dns-cd5cbd7b9-4v2wj\" (UID: \"b8f5f08a-6776-49c8-8638-4225b9f222ab\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" Oct 03 08:59:17 crc kubenswrapper[4899]: I1003 08:59:17.703463 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" Oct 03 08:59:18 crc kubenswrapper[4899]: I1003 08:59:18.173265 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-4v2wj"] Oct 03 08:59:18 crc kubenswrapper[4899]: W1003 08:59:18.178956 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb8f5f08a_6776_49c8_8638_4225b9f222ab.slice/crio-cd1dda4790b1a954c812646ce2846638972f2ce31d3fedb5d95a909634e3d5f8 WatchSource:0}: Error finding container cd1dda4790b1a954c812646ce2846638972f2ce31d3fedb5d95a909634e3d5f8: Status 404 returned error can't find the container with id cd1dda4790b1a954c812646ce2846638972f2ce31d3fedb5d95a909634e3d5f8 Oct 03 08:59:19 crc kubenswrapper[4899]: I1003 08:59:19.124770 4899 generic.go:334] "Generic (PLEG): container finished" podID="b8f5f08a-6776-49c8-8638-4225b9f222ab" containerID="f51be7daad81c7f0cdc0e8215173d7fcef4d0d16b746151e5884c62d564b0fd2" exitCode=0 Oct 03 08:59:19 crc kubenswrapper[4899]: I1003 08:59:19.124875 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" event={"ID":"b8f5f08a-6776-49c8-8638-4225b9f222ab","Type":"ContainerDied","Data":"f51be7daad81c7f0cdc0e8215173d7fcef4d0d16b746151e5884c62d564b0fd2"} Oct 03 08:59:19 crc kubenswrapper[4899]: I1003 08:59:19.125615 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" event={"ID":"b8f5f08a-6776-49c8-8638-4225b9f222ab","Type":"ContainerStarted","Data":"cd1dda4790b1a954c812646ce2846638972f2ce31d3fedb5d95a909634e3d5f8"} Oct 03 08:59:19 crc kubenswrapper[4899]: I1003 08:59:19.260023 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:59:19 crc kubenswrapper[4899]: I1003 08:59:19.260382 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" 
podUID="0057a378-f37c-473f-a1cb-9c9f1059a3c3" containerName="ceilometer-central-agent" containerID="cri-o://87f663b61409e48f0ceb5afd5c9a91df64e4746249cc9e243d55da03a5b1214b" gracePeriod=30 Oct 03 08:59:19 crc kubenswrapper[4899]: I1003 08:59:19.260414 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0057a378-f37c-473f-a1cb-9c9f1059a3c3" containerName="proxy-httpd" containerID="cri-o://c5dde3e089f7348e258d69cc42cf942c0365b85c2dd7ad5228cf38c2edffb068" gracePeriod=30 Oct 03 08:59:19 crc kubenswrapper[4899]: I1003 08:59:19.260395 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0057a378-f37c-473f-a1cb-9c9f1059a3c3" containerName="sg-core" containerID="cri-o://4e0b99d1ec94b72db81ba1f1a856af0848f4bb266bea6d2d2976fd9f215d8f74" gracePeriod=30 Oct 03 08:59:19 crc kubenswrapper[4899]: I1003 08:59:19.260566 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0057a378-f37c-473f-a1cb-9c9f1059a3c3" containerName="ceilometer-notification-agent" containerID="cri-o://127bf3afbd7a7644c3abd4883d90458d455aee9bd0a44272aa18f11f410f6412" gracePeriod=30 Oct 03 08:59:19 crc kubenswrapper[4899]: I1003 08:59:19.278338 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 03 08:59:19 crc kubenswrapper[4899]: I1003 08:59:19.805239 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 03 08:59:19 crc kubenswrapper[4899]: I1003 08:59:19.805739 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 03 08:59:19 crc kubenswrapper[4899]: I1003 08:59:19.820199 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 08:59:19 crc kubenswrapper[4899]: I1003 08:59:19.858291 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:59:20 crc kubenswrapper[4899]: I1003 08:59:20.139961 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" event={"ID":"b8f5f08a-6776-49c8-8638-4225b9f222ab","Type":"ContainerStarted","Data":"8e1af9d5ac5582fd108df81837412bf44847d6388424a15e7c8c4da11cf10682"} Oct 03 08:59:20 crc kubenswrapper[4899]: I1003 08:59:20.140272 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" Oct 03 08:59:20 crc kubenswrapper[4899]: I1003 08:59:20.142507 4899 generic.go:334] "Generic (PLEG): container finished" podID="0057a378-f37c-473f-a1cb-9c9f1059a3c3" containerID="c5dde3e089f7348e258d69cc42cf942c0365b85c2dd7ad5228cf38c2edffb068" exitCode=0 Oct 03 08:59:20 crc kubenswrapper[4899]: I1003 08:59:20.142544 4899 generic.go:334] "Generic (PLEG): container finished" podID="0057a378-f37c-473f-a1cb-9c9f1059a3c3" containerID="4e0b99d1ec94b72db81ba1f1a856af0848f4bb266bea6d2d2976fd9f215d8f74" exitCode=2 Oct 03 08:59:20 crc kubenswrapper[4899]: I1003 08:59:20.142556 4899 generic.go:334] "Generic (PLEG): container finished" podID="0057a378-f37c-473f-a1cb-9c9f1059a3c3" containerID="87f663b61409e48f0ceb5afd5c9a91df64e4746249cc9e243d55da03a5b1214b" exitCode=0 Oct 03 08:59:20 crc kubenswrapper[4899]: I1003 08:59:20.143557 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"0057a378-f37c-473f-a1cb-9c9f1059a3c3","Type":"ContainerDied","Data":"c5dde3e089f7348e258d69cc42cf942c0365b85c2dd7ad5228cf38c2edffb068"} Oct 03 08:59:20 crc kubenswrapper[4899]: I1003 08:59:20.143594 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0057a378-f37c-473f-a1cb-9c9f1059a3c3","Type":"ContainerDied","Data":"4e0b99d1ec94b72db81ba1f1a856af0848f4bb266bea6d2d2976fd9f215d8f74"} Oct 03 08:59:20 crc kubenswrapper[4899]: I1003 08:59:20.143605 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0057a378-f37c-473f-a1cb-9c9f1059a3c3","Type":"ContainerDied","Data":"87f663b61409e48f0ceb5afd5c9a91df64e4746249cc9e243d55da03a5b1214b"} Oct 03 08:59:20 crc kubenswrapper[4899]: I1003 08:59:20.143754 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="236db178-47ce-42a1-bc8e-e3c5fa287ab8" containerName="nova-api-log" containerID="cri-o://d3d661b8d0f83e7c0dd10009cf6101421466b5fd616408bacccc39d4796348c2" gracePeriod=30 Oct 03 08:59:20 crc kubenswrapper[4899]: I1003 08:59:20.144161 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="236db178-47ce-42a1-bc8e-e3c5fa287ab8" containerName="nova-api-api" containerID="cri-o://059dd890e6c062f7e0068bbf931f49126e60649c83ccb2ceda81fc970d951fdd" gracePeriod=30 Oct 03 08:59:20 crc kubenswrapper[4899]: I1003 08:59:20.163089 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" podStartSLOduration=3.163071236 podStartE2EDuration="3.163071236s" podCreationTimestamp="2025-10-03 08:59:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:59:20.160816405 +0000 UTC m=+1134.268301358" watchObservedRunningTime="2025-10-03 08:59:20.163071236 +0000 UTC m=+1134.270556189" Oct 03 08:59:21 crc kubenswrapper[4899]: I1003 08:59:21.153705 4899 generic.go:334] "Generic (PLEG): container finished" podID="236db178-47ce-42a1-bc8e-e3c5fa287ab8" containerID="d3d661b8d0f83e7c0dd10009cf6101421466b5fd616408bacccc39d4796348c2" exitCode=143 Oct 03 08:59:21 crc kubenswrapper[4899]: I1003 08:59:21.153819 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"236db178-47ce-42a1-bc8e-e3c5fa287ab8","Type":"ContainerDied","Data":"d3d661b8d0f83e7c0dd10009cf6101421466b5fd616408bacccc39d4796348c2"} Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.025435 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.091778 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0057a378-f37c-473f-a1cb-9c9f1059a3c3-log-httpd\") pod \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.091856 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rmklh\" (UniqueName: \"kubernetes.io/projected/0057a378-f37c-473f-a1cb-9c9f1059a3c3-kube-api-access-rmklh\") pod \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.091967 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-ceilometer-tls-certs\") pod \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.092037 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-sg-core-conf-yaml\") pod \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.092061 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-config-data\") pod \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.092102 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0057a378-f37c-473f-a1cb-9c9f1059a3c3-run-httpd\") pod \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.092194 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-combined-ca-bundle\") pod \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.092238 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-scripts\") pod \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\" (UID: \"0057a378-f37c-473f-a1cb-9c9f1059a3c3\") " Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.092530 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0057a378-f37c-473f-a1cb-9c9f1059a3c3-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0057a378-f37c-473f-a1cb-9c9f1059a3c3" (UID: "0057a378-f37c-473f-a1cb-9c9f1059a3c3"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.092546 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0057a378-f37c-473f-a1cb-9c9f1059a3c3-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0057a378-f37c-473f-a1cb-9c9f1059a3c3" (UID: "0057a378-f37c-473f-a1cb-9c9f1059a3c3"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.092818 4899 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0057a378-f37c-473f-a1cb-9c9f1059a3c3-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.092833 4899 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0057a378-f37c-473f-a1cb-9c9f1059a3c3-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.107154 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0057a378-f37c-473f-a1cb-9c9f1059a3c3-kube-api-access-rmklh" (OuterVolumeSpecName: "kube-api-access-rmklh") pod "0057a378-f37c-473f-a1cb-9c9f1059a3c3" (UID: "0057a378-f37c-473f-a1cb-9c9f1059a3c3"). InnerVolumeSpecName "kube-api-access-rmklh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.109402 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-scripts" (OuterVolumeSpecName: "scripts") pod "0057a378-f37c-473f-a1cb-9c9f1059a3c3" (UID: "0057a378-f37c-473f-a1cb-9c9f1059a3c3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.134795 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0057a378-f37c-473f-a1cb-9c9f1059a3c3" (UID: "0057a378-f37c-473f-a1cb-9c9f1059a3c3"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.148113 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "0057a378-f37c-473f-a1cb-9c9f1059a3c3" (UID: "0057a378-f37c-473f-a1cb-9c9f1059a3c3"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.165413 4899 generic.go:334] "Generic (PLEG): container finished" podID="0057a378-f37c-473f-a1cb-9c9f1059a3c3" containerID="127bf3afbd7a7644c3abd4883d90458d455aee9bd0a44272aa18f11f410f6412" exitCode=0 Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.165456 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0057a378-f37c-473f-a1cb-9c9f1059a3c3","Type":"ContainerDied","Data":"127bf3afbd7a7644c3abd4883d90458d455aee9bd0a44272aa18f11f410f6412"} Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.165482 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0057a378-f37c-473f-a1cb-9c9f1059a3c3","Type":"ContainerDied","Data":"46b2bfa638934d4c5d26d37e58a4804cc92dcbee08dee27c67fe55c07e47bca8"} Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.165499 4899 scope.go:117] "RemoveContainer" containerID="c5dde3e089f7348e258d69cc42cf942c0365b85c2dd7ad5228cf38c2edffb068" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.165521 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.178015 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0057a378-f37c-473f-a1cb-9c9f1059a3c3" (UID: "0057a378-f37c-473f-a1cb-9c9f1059a3c3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.194550 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rmklh\" (UniqueName: \"kubernetes.io/projected/0057a378-f37c-473f-a1cb-9c9f1059a3c3-kube-api-access-rmklh\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.194581 4899 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.194590 4899 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.194601 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.194614 4899 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.196456 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-config-data" (OuterVolumeSpecName: "config-data") pod "0057a378-f37c-473f-a1cb-9c9f1059a3c3" (UID: "0057a378-f37c-473f-a1cb-9c9f1059a3c3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.211050 4899 scope.go:117] "RemoveContainer" containerID="4e0b99d1ec94b72db81ba1f1a856af0848f4bb266bea6d2d2976fd9f215d8f74" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.228163 4899 scope.go:117] "RemoveContainer" containerID="127bf3afbd7a7644c3abd4883d90458d455aee9bd0a44272aa18f11f410f6412" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.247288 4899 scope.go:117] "RemoveContainer" containerID="87f663b61409e48f0ceb5afd5c9a91df64e4746249cc9e243d55da03a5b1214b" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.265555 4899 scope.go:117] "RemoveContainer" containerID="c5dde3e089f7348e258d69cc42cf942c0365b85c2dd7ad5228cf38c2edffb068" Oct 03 08:59:22 crc kubenswrapper[4899]: E1003 08:59:22.266256 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5dde3e089f7348e258d69cc42cf942c0365b85c2dd7ad5228cf38c2edffb068\": container with ID starting with c5dde3e089f7348e258d69cc42cf942c0365b85c2dd7ad5228cf38c2edffb068 not found: ID does not exist" containerID="c5dde3e089f7348e258d69cc42cf942c0365b85c2dd7ad5228cf38c2edffb068" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.266286 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5dde3e089f7348e258d69cc42cf942c0365b85c2dd7ad5228cf38c2edffb068"} err="failed to get container status \"c5dde3e089f7348e258d69cc42cf942c0365b85c2dd7ad5228cf38c2edffb068\": rpc error: code = NotFound desc = could not find container \"c5dde3e089f7348e258d69cc42cf942c0365b85c2dd7ad5228cf38c2edffb068\": container with ID starting with c5dde3e089f7348e258d69cc42cf942c0365b85c2dd7ad5228cf38c2edffb068 not found: ID does not exist" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.266311 4899 scope.go:117] "RemoveContainer" containerID="4e0b99d1ec94b72db81ba1f1a856af0848f4bb266bea6d2d2976fd9f215d8f74" Oct 03 08:59:22 crc kubenswrapper[4899]: E1003 08:59:22.266551 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e0b99d1ec94b72db81ba1f1a856af0848f4bb266bea6d2d2976fd9f215d8f74\": container with ID starting with 4e0b99d1ec94b72db81ba1f1a856af0848f4bb266bea6d2d2976fd9f215d8f74 not found: ID does not exist" containerID="4e0b99d1ec94b72db81ba1f1a856af0848f4bb266bea6d2d2976fd9f215d8f74" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.266648 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e0b99d1ec94b72db81ba1f1a856af0848f4bb266bea6d2d2976fd9f215d8f74"} err="failed to get container status \"4e0b99d1ec94b72db81ba1f1a856af0848f4bb266bea6d2d2976fd9f215d8f74\": rpc error: code = NotFound desc = could not find container \"4e0b99d1ec94b72db81ba1f1a856af0848f4bb266bea6d2d2976fd9f215d8f74\": container with ID starting with 4e0b99d1ec94b72db81ba1f1a856af0848f4bb266bea6d2d2976fd9f215d8f74 not found: ID does not exist" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.266731 4899 scope.go:117] "RemoveContainer" containerID="127bf3afbd7a7644c3abd4883d90458d455aee9bd0a44272aa18f11f410f6412" Oct 03 08:59:22 crc kubenswrapper[4899]: E1003 08:59:22.267223 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"127bf3afbd7a7644c3abd4883d90458d455aee9bd0a44272aa18f11f410f6412\": container with ID starting with 
127bf3afbd7a7644c3abd4883d90458d455aee9bd0a44272aa18f11f410f6412 not found: ID does not exist" containerID="127bf3afbd7a7644c3abd4883d90458d455aee9bd0a44272aa18f11f410f6412" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.267254 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"127bf3afbd7a7644c3abd4883d90458d455aee9bd0a44272aa18f11f410f6412"} err="failed to get container status \"127bf3afbd7a7644c3abd4883d90458d455aee9bd0a44272aa18f11f410f6412\": rpc error: code = NotFound desc = could not find container \"127bf3afbd7a7644c3abd4883d90458d455aee9bd0a44272aa18f11f410f6412\": container with ID starting with 127bf3afbd7a7644c3abd4883d90458d455aee9bd0a44272aa18f11f410f6412 not found: ID does not exist" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.267274 4899 scope.go:117] "RemoveContainer" containerID="87f663b61409e48f0ceb5afd5c9a91df64e4746249cc9e243d55da03a5b1214b" Oct 03 08:59:22 crc kubenswrapper[4899]: E1003 08:59:22.267594 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87f663b61409e48f0ceb5afd5c9a91df64e4746249cc9e243d55da03a5b1214b\": container with ID starting with 87f663b61409e48f0ceb5afd5c9a91df64e4746249cc9e243d55da03a5b1214b not found: ID does not exist" containerID="87f663b61409e48f0ceb5afd5c9a91df64e4746249cc9e243d55da03a5b1214b" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.267621 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87f663b61409e48f0ceb5afd5c9a91df64e4746249cc9e243d55da03a5b1214b"} err="failed to get container status \"87f663b61409e48f0ceb5afd5c9a91df64e4746249cc9e243d55da03a5b1214b\": rpc error: code = NotFound desc = could not find container \"87f663b61409e48f0ceb5afd5c9a91df64e4746249cc9e243d55da03a5b1214b\": container with ID starting with 87f663b61409e48f0ceb5afd5c9a91df64e4746249cc9e243d55da03a5b1214b not found: ID does not exist" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.296643 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0057a378-f37c-473f-a1cb-9c9f1059a3c3-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.501726 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.510867 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.524642 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:59:22 crc kubenswrapper[4899]: E1003 08:59:22.527999 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0057a378-f37c-473f-a1cb-9c9f1059a3c3" containerName="ceilometer-notification-agent" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.528032 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="0057a378-f37c-473f-a1cb-9c9f1059a3c3" containerName="ceilometer-notification-agent" Oct 03 08:59:22 crc kubenswrapper[4899]: E1003 08:59:22.528042 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0057a378-f37c-473f-a1cb-9c9f1059a3c3" containerName="ceilometer-central-agent" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.528048 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="0057a378-f37c-473f-a1cb-9c9f1059a3c3" containerName="ceilometer-central-agent" Oct 03 
08:59:22 crc kubenswrapper[4899]: E1003 08:59:22.528062 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0057a378-f37c-473f-a1cb-9c9f1059a3c3" containerName="proxy-httpd" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.528071 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="0057a378-f37c-473f-a1cb-9c9f1059a3c3" containerName="proxy-httpd" Oct 03 08:59:22 crc kubenswrapper[4899]: E1003 08:59:22.528098 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0057a378-f37c-473f-a1cb-9c9f1059a3c3" containerName="sg-core" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.528103 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="0057a378-f37c-473f-a1cb-9c9f1059a3c3" containerName="sg-core" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.528397 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="0057a378-f37c-473f-a1cb-9c9f1059a3c3" containerName="proxy-httpd" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.528413 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="0057a378-f37c-473f-a1cb-9c9f1059a3c3" containerName="ceilometer-central-agent" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.528423 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="0057a378-f37c-473f-a1cb-9c9f1059a3c3" containerName="sg-core" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.528436 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="0057a378-f37c-473f-a1cb-9c9f1059a3c3" containerName="ceilometer-notification-agent" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.530223 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.532584 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.532723 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.532596 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.542773 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0057a378-f37c-473f-a1cb-9c9f1059a3c3" path="/var/lib/kubelet/pods/0057a378-f37c-473f-a1cb-9c9f1059a3c3/volumes" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.543742 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.601834 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0-scripts\") pod \"ceilometer-0\" (UID: \"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0\") " pod="openstack/ceilometer-0" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.601886 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0-config-data\") pod \"ceilometer-0\" (UID: \"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0\") " pod="openstack/ceilometer-0" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.601918 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-rvllw\" (UniqueName: \"kubernetes.io/projected/ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0-kube-api-access-rvllw\") pod \"ceilometer-0\" (UID: \"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0\") " pod="openstack/ceilometer-0" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.601958 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0\") " pod="openstack/ceilometer-0" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.601982 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0-run-httpd\") pod \"ceilometer-0\" (UID: \"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0\") " pod="openstack/ceilometer-0" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.602467 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0-log-httpd\") pod \"ceilometer-0\" (UID: \"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0\") " pod="openstack/ceilometer-0" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.602582 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0\") " pod="openstack/ceilometer-0" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.602650 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0\") " pod="openstack/ceilometer-0" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.704633 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0\") " pod="openstack/ceilometer-0" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.704691 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0-run-httpd\") pod \"ceilometer-0\" (UID: \"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0\") " pod="openstack/ceilometer-0" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.704769 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0-log-httpd\") pod \"ceilometer-0\" (UID: \"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0\") " pod="openstack/ceilometer-0" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.704793 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0\") " pod="openstack/ceilometer-0" Oct 03 08:59:22 crc 
kubenswrapper[4899]: I1003 08:59:22.704813 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0\") " pod="openstack/ceilometer-0" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.704879 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0-scripts\") pod \"ceilometer-0\" (UID: \"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0\") " pod="openstack/ceilometer-0" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.704922 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0-config-data\") pod \"ceilometer-0\" (UID: \"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0\") " pod="openstack/ceilometer-0" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.704940 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvllw\" (UniqueName: \"kubernetes.io/projected/ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0-kube-api-access-rvllw\") pod \"ceilometer-0\" (UID: \"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0\") " pod="openstack/ceilometer-0" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.705279 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0-run-httpd\") pod \"ceilometer-0\" (UID: \"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0\") " pod="openstack/ceilometer-0" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.709649 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0\") " pod="openstack/ceilometer-0" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.710122 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0-log-httpd\") pod \"ceilometer-0\" (UID: \"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0\") " pod="openstack/ceilometer-0" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.711199 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0-scripts\") pod \"ceilometer-0\" (UID: \"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0\") " pod="openstack/ceilometer-0" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.715230 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0-config-data\") pod \"ceilometer-0\" (UID: \"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0\") " pod="openstack/ceilometer-0" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.715615 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0\") " pod="openstack/ceilometer-0" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.721225 4899 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0\") " pod="openstack/ceilometer-0" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.724673 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvllw\" (UniqueName: \"kubernetes.io/projected/ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0-kube-api-access-rvllw\") pod \"ceilometer-0\" (UID: \"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0\") " pod="openstack/ceilometer-0" Oct 03 08:59:22 crc kubenswrapper[4899]: I1003 08:59:22.848052 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 08:59:23 crc kubenswrapper[4899]: I1003 08:59:23.284830 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 08:59:23 crc kubenswrapper[4899]: W1003 08:59:23.297085 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podec33e983_7a1b_4ccd_9d45_ac1fc77a94a0.slice/crio-92106fc3bf087b964233359fce81e4f475be18ae6294708bfd2462c26bd7480f WatchSource:0}: Error finding container 92106fc3bf087b964233359fce81e4f475be18ae6294708bfd2462c26bd7480f: Status 404 returned error can't find the container with id 92106fc3bf087b964233359fce81e4f475be18ae6294708bfd2462c26bd7480f Oct 03 08:59:23 crc kubenswrapper[4899]: I1003 08:59:23.610927 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 08:59:23 crc kubenswrapper[4899]: I1003 08:59:23.723286 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/236db178-47ce-42a1-bc8e-e3c5fa287ab8-config-data\") pod \"236db178-47ce-42a1-bc8e-e3c5fa287ab8\" (UID: \"236db178-47ce-42a1-bc8e-e3c5fa287ab8\") " Oct 03 08:59:23 crc kubenswrapper[4899]: I1003 08:59:23.723350 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/236db178-47ce-42a1-bc8e-e3c5fa287ab8-combined-ca-bundle\") pod \"236db178-47ce-42a1-bc8e-e3c5fa287ab8\" (UID: \"236db178-47ce-42a1-bc8e-e3c5fa287ab8\") " Oct 03 08:59:23 crc kubenswrapper[4899]: I1003 08:59:23.723410 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/236db178-47ce-42a1-bc8e-e3c5fa287ab8-logs\") pod \"236db178-47ce-42a1-bc8e-e3c5fa287ab8\" (UID: \"236db178-47ce-42a1-bc8e-e3c5fa287ab8\") " Oct 03 08:59:23 crc kubenswrapper[4899]: I1003 08:59:23.723551 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vq8p8\" (UniqueName: \"kubernetes.io/projected/236db178-47ce-42a1-bc8e-e3c5fa287ab8-kube-api-access-vq8p8\") pod \"236db178-47ce-42a1-bc8e-e3c5fa287ab8\" (UID: \"236db178-47ce-42a1-bc8e-e3c5fa287ab8\") " Oct 03 08:59:23 crc kubenswrapper[4899]: I1003 08:59:23.724121 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/236db178-47ce-42a1-bc8e-e3c5fa287ab8-logs" (OuterVolumeSpecName: "logs") pod "236db178-47ce-42a1-bc8e-e3c5fa287ab8" (UID: "236db178-47ce-42a1-bc8e-e3c5fa287ab8"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:59:23 crc kubenswrapper[4899]: I1003 08:59:23.724561 4899 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/236db178-47ce-42a1-bc8e-e3c5fa287ab8-logs\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:23 crc kubenswrapper[4899]: I1003 08:59:23.730337 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/236db178-47ce-42a1-bc8e-e3c5fa287ab8-kube-api-access-vq8p8" (OuterVolumeSpecName: "kube-api-access-vq8p8") pod "236db178-47ce-42a1-bc8e-e3c5fa287ab8" (UID: "236db178-47ce-42a1-bc8e-e3c5fa287ab8"). InnerVolumeSpecName "kube-api-access-vq8p8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:59:23 crc kubenswrapper[4899]: I1003 08:59:23.753417 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/236db178-47ce-42a1-bc8e-e3c5fa287ab8-config-data" (OuterVolumeSpecName: "config-data") pod "236db178-47ce-42a1-bc8e-e3c5fa287ab8" (UID: "236db178-47ce-42a1-bc8e-e3c5fa287ab8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:59:23 crc kubenswrapper[4899]: I1003 08:59:23.755455 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/236db178-47ce-42a1-bc8e-e3c5fa287ab8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "236db178-47ce-42a1-bc8e-e3c5fa287ab8" (UID: "236db178-47ce-42a1-bc8e-e3c5fa287ab8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:59:23 crc kubenswrapper[4899]: I1003 08:59:23.826206 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/236db178-47ce-42a1-bc8e-e3c5fa287ab8-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:23 crc kubenswrapper[4899]: I1003 08:59:23.826241 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/236db178-47ce-42a1-bc8e-e3c5fa287ab8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:23 crc kubenswrapper[4899]: I1003 08:59:23.826252 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vq8p8\" (UniqueName: \"kubernetes.io/projected/236db178-47ce-42a1-bc8e-e3c5fa287ab8-kube-api-access-vq8p8\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.200049 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0","Type":"ContainerStarted","Data":"8b5d398ca9ae2e3d1558d3712a2633cf4d4b016810d4a21c34e229a4c1c256ff"} Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.200402 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0","Type":"ContainerStarted","Data":"92106fc3bf087b964233359fce81e4f475be18ae6294708bfd2462c26bd7480f"} Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.202149 4899 generic.go:334] "Generic (PLEG): container finished" podID="236db178-47ce-42a1-bc8e-e3c5fa287ab8" containerID="059dd890e6c062f7e0068bbf931f49126e60649c83ccb2ceda81fc970d951fdd" exitCode=0 Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.202190 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"236db178-47ce-42a1-bc8e-e3c5fa287ab8","Type":"ContainerDied","Data":"059dd890e6c062f7e0068bbf931f49126e60649c83ccb2ceda81fc970d951fdd"} Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.202202 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.202215 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"236db178-47ce-42a1-bc8e-e3c5fa287ab8","Type":"ContainerDied","Data":"39d8f683f2219659143453a21af3bf7847ea1f1428a08f03e09bd8daa2a8a86a"} Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.202231 4899 scope.go:117] "RemoveContainer" containerID="059dd890e6c062f7e0068bbf931f49126e60649c83ccb2ceda81fc970d951fdd" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.227810 4899 scope.go:117] "RemoveContainer" containerID="d3d661b8d0f83e7c0dd10009cf6101421466b5fd616408bacccc39d4796348c2" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.239271 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.254176 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.257087 4899 scope.go:117] "RemoveContainer" containerID="059dd890e6c062f7e0068bbf931f49126e60649c83ccb2ceda81fc970d951fdd" Oct 03 08:59:24 crc kubenswrapper[4899]: E1003 08:59:24.257596 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"059dd890e6c062f7e0068bbf931f49126e60649c83ccb2ceda81fc970d951fdd\": container with ID starting with 059dd890e6c062f7e0068bbf931f49126e60649c83ccb2ceda81fc970d951fdd not found: ID does not exist" containerID="059dd890e6c062f7e0068bbf931f49126e60649c83ccb2ceda81fc970d951fdd" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.257630 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"059dd890e6c062f7e0068bbf931f49126e60649c83ccb2ceda81fc970d951fdd"} err="failed to get container status \"059dd890e6c062f7e0068bbf931f49126e60649c83ccb2ceda81fc970d951fdd\": rpc error: code = NotFound desc = could not find container \"059dd890e6c062f7e0068bbf931f49126e60649c83ccb2ceda81fc970d951fdd\": container with ID starting with 059dd890e6c062f7e0068bbf931f49126e60649c83ccb2ceda81fc970d951fdd not found: ID does not exist" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.257649 4899 scope.go:117] "RemoveContainer" containerID="d3d661b8d0f83e7c0dd10009cf6101421466b5fd616408bacccc39d4796348c2" Oct 03 08:59:24 crc kubenswrapper[4899]: E1003 08:59:24.259118 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3d661b8d0f83e7c0dd10009cf6101421466b5fd616408bacccc39d4796348c2\": container with ID starting with d3d661b8d0f83e7c0dd10009cf6101421466b5fd616408bacccc39d4796348c2 not found: ID does not exist" containerID="d3d661b8d0f83e7c0dd10009cf6101421466b5fd616408bacccc39d4796348c2" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.259179 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3d661b8d0f83e7c0dd10009cf6101421466b5fd616408bacccc39d4796348c2"} err="failed to get container status \"d3d661b8d0f83e7c0dd10009cf6101421466b5fd616408bacccc39d4796348c2\": rpc error: code = NotFound desc = could not find container 
\"d3d661b8d0f83e7c0dd10009cf6101421466b5fd616408bacccc39d4796348c2\": container with ID starting with d3d661b8d0f83e7c0dd10009cf6101421466b5fd616408bacccc39d4796348c2 not found: ID does not exist" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.275998 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 03 08:59:24 crc kubenswrapper[4899]: E1003 08:59:24.276560 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="236db178-47ce-42a1-bc8e-e3c5fa287ab8" containerName="nova-api-log" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.276580 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="236db178-47ce-42a1-bc8e-e3c5fa287ab8" containerName="nova-api-log" Oct 03 08:59:24 crc kubenswrapper[4899]: E1003 08:59:24.276637 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="236db178-47ce-42a1-bc8e-e3c5fa287ab8" containerName="nova-api-api" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.276646 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="236db178-47ce-42a1-bc8e-e3c5fa287ab8" containerName="nova-api-api" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.276831 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="236db178-47ce-42a1-bc8e-e3c5fa287ab8" containerName="nova-api-log" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.276856 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="236db178-47ce-42a1-bc8e-e3c5fa287ab8" containerName="nova-api-api" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.277964 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.280118 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.280457 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.280686 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.303952 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.437597 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f81520b3-d279-4570-ab54-eea5d6e44ff5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f81520b3-d279-4570-ab54-eea5d6e44ff5\") " pod="openstack/nova-api-0" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.438121 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f81520b3-d279-4570-ab54-eea5d6e44ff5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"f81520b3-d279-4570-ab54-eea5d6e44ff5\") " pod="openstack/nova-api-0" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.438258 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6gvj\" (UniqueName: \"kubernetes.io/projected/f81520b3-d279-4570-ab54-eea5d6e44ff5-kube-api-access-v6gvj\") pod \"nova-api-0\" (UID: \"f81520b3-d279-4570-ab54-eea5d6e44ff5\") " pod="openstack/nova-api-0" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.438368 4899 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f81520b3-d279-4570-ab54-eea5d6e44ff5-config-data\") pod \"nova-api-0\" (UID: \"f81520b3-d279-4570-ab54-eea5d6e44ff5\") " pod="openstack/nova-api-0" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.438549 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f81520b3-d279-4570-ab54-eea5d6e44ff5-public-tls-certs\") pod \"nova-api-0\" (UID: \"f81520b3-d279-4570-ab54-eea5d6e44ff5\") " pod="openstack/nova-api-0" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.438594 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f81520b3-d279-4570-ab54-eea5d6e44ff5-logs\") pod \"nova-api-0\" (UID: \"f81520b3-d279-4570-ab54-eea5d6e44ff5\") " pod="openstack/nova-api-0" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.540736 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f81520b3-d279-4570-ab54-eea5d6e44ff5-public-tls-certs\") pod \"nova-api-0\" (UID: \"f81520b3-d279-4570-ab54-eea5d6e44ff5\") " pod="openstack/nova-api-0" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.540796 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f81520b3-d279-4570-ab54-eea5d6e44ff5-logs\") pod \"nova-api-0\" (UID: \"f81520b3-d279-4570-ab54-eea5d6e44ff5\") " pod="openstack/nova-api-0" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.540962 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f81520b3-d279-4570-ab54-eea5d6e44ff5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f81520b3-d279-4570-ab54-eea5d6e44ff5\") " pod="openstack/nova-api-0" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.541037 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f81520b3-d279-4570-ab54-eea5d6e44ff5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"f81520b3-d279-4570-ab54-eea5d6e44ff5\") " pod="openstack/nova-api-0" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.541076 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6gvj\" (UniqueName: \"kubernetes.io/projected/f81520b3-d279-4570-ab54-eea5d6e44ff5-kube-api-access-v6gvj\") pod \"nova-api-0\" (UID: \"f81520b3-d279-4570-ab54-eea5d6e44ff5\") " pod="openstack/nova-api-0" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.541124 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f81520b3-d279-4570-ab54-eea5d6e44ff5-config-data\") pod \"nova-api-0\" (UID: \"f81520b3-d279-4570-ab54-eea5d6e44ff5\") " pod="openstack/nova-api-0" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.543326 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f81520b3-d279-4570-ab54-eea5d6e44ff5-logs\") pod \"nova-api-0\" (UID: \"f81520b3-d279-4570-ab54-eea5d6e44ff5\") " pod="openstack/nova-api-0" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.546026 4899 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f81520b3-d279-4570-ab54-eea5d6e44ff5-config-data\") pod \"nova-api-0\" (UID: \"f81520b3-d279-4570-ab54-eea5d6e44ff5\") " pod="openstack/nova-api-0" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.547220 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f81520b3-d279-4570-ab54-eea5d6e44ff5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f81520b3-d279-4570-ab54-eea5d6e44ff5\") " pod="openstack/nova-api-0" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.548718 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f81520b3-d279-4570-ab54-eea5d6e44ff5-public-tls-certs\") pod \"nova-api-0\" (UID: \"f81520b3-d279-4570-ab54-eea5d6e44ff5\") " pod="openstack/nova-api-0" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.550360 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f81520b3-d279-4570-ab54-eea5d6e44ff5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"f81520b3-d279-4570-ab54-eea5d6e44ff5\") " pod="openstack/nova-api-0" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.560799 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6gvj\" (UniqueName: \"kubernetes.io/projected/f81520b3-d279-4570-ab54-eea5d6e44ff5-kube-api-access-v6gvj\") pod \"nova-api-0\" (UID: \"f81520b3-d279-4570-ab54-eea5d6e44ff5\") " pod="openstack/nova-api-0" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.566781 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="236db178-47ce-42a1-bc8e-e3c5fa287ab8" path="/var/lib/kubelet/pods/236db178-47ce-42a1-bc8e-e3c5fa287ab8/volumes" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.597201 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.805993 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.806318 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.859977 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:59:24 crc kubenswrapper[4899]: I1003 08:59:24.894695 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:59:25 crc kubenswrapper[4899]: I1003 08:59:25.194149 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 08:59:25 crc kubenswrapper[4899]: I1003 08:59:25.227728 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0","Type":"ContainerStarted","Data":"c93aef699e88c4cd65a5a3e579591576a7059126bb4363f93f7c4401d3d52417"} Oct 03 08:59:25 crc kubenswrapper[4899]: I1003 08:59:25.230061 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f81520b3-d279-4570-ab54-eea5d6e44ff5","Type":"ContainerStarted","Data":"b868da5444b357167c096ab3d9b8ebad9021b9c55c19156ae95150691ba1d527"} Oct 03 08:59:25 crc kubenswrapper[4899]: I1003 08:59:25.252461 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Oct 03 08:59:25 crc kubenswrapper[4899]: I1003 08:59:25.398941 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-2gxgr"] Oct 03 08:59:25 crc kubenswrapper[4899]: I1003 08:59:25.400471 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-2gxgr" Oct 03 08:59:25 crc kubenswrapper[4899]: I1003 08:59:25.405844 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Oct 03 08:59:25 crc kubenswrapper[4899]: I1003 08:59:25.406063 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Oct 03 08:59:25 crc kubenswrapper[4899]: I1003 08:59:25.428732 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-2gxgr"] Oct 03 08:59:25 crc kubenswrapper[4899]: I1003 08:59:25.461620 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a51e9ebc-665e-4ee1-bbef-935ff3835fbd-config-data\") pod \"nova-cell1-cell-mapping-2gxgr\" (UID: \"a51e9ebc-665e-4ee1-bbef-935ff3835fbd\") " pod="openstack/nova-cell1-cell-mapping-2gxgr" Oct 03 08:59:25 crc kubenswrapper[4899]: I1003 08:59:25.461690 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvrjp\" (UniqueName: \"kubernetes.io/projected/a51e9ebc-665e-4ee1-bbef-935ff3835fbd-kube-api-access-cvrjp\") pod \"nova-cell1-cell-mapping-2gxgr\" (UID: \"a51e9ebc-665e-4ee1-bbef-935ff3835fbd\") " pod="openstack/nova-cell1-cell-mapping-2gxgr" Oct 03 08:59:25 crc kubenswrapper[4899]: I1003 08:59:25.461817 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a51e9ebc-665e-4ee1-bbef-935ff3835fbd-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-2gxgr\" (UID: \"a51e9ebc-665e-4ee1-bbef-935ff3835fbd\") " pod="openstack/nova-cell1-cell-mapping-2gxgr" Oct 03 08:59:25 crc kubenswrapper[4899]: I1003 08:59:25.461841 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a51e9ebc-665e-4ee1-bbef-935ff3835fbd-scripts\") pod \"nova-cell1-cell-mapping-2gxgr\" (UID: \"a51e9ebc-665e-4ee1-bbef-935ff3835fbd\") " pod="openstack/nova-cell1-cell-mapping-2gxgr" Oct 03 08:59:25 crc kubenswrapper[4899]: I1003 08:59:25.567230 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a51e9ebc-665e-4ee1-bbef-935ff3835fbd-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-2gxgr\" (UID: \"a51e9ebc-665e-4ee1-bbef-935ff3835fbd\") " pod="openstack/nova-cell1-cell-mapping-2gxgr" Oct 03 08:59:25 crc kubenswrapper[4899]: I1003 08:59:25.567314 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a51e9ebc-665e-4ee1-bbef-935ff3835fbd-scripts\") pod \"nova-cell1-cell-mapping-2gxgr\" (UID: \"a51e9ebc-665e-4ee1-bbef-935ff3835fbd\") " pod="openstack/nova-cell1-cell-mapping-2gxgr" Oct 03 08:59:25 crc kubenswrapper[4899]: I1003 08:59:25.567404 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a51e9ebc-665e-4ee1-bbef-935ff3835fbd-config-data\") pod \"nova-cell1-cell-mapping-2gxgr\" (UID: \"a51e9ebc-665e-4ee1-bbef-935ff3835fbd\") " pod="openstack/nova-cell1-cell-mapping-2gxgr" Oct 03 08:59:25 crc kubenswrapper[4899]: I1003 08:59:25.567485 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvrjp\" (UniqueName: 
\"kubernetes.io/projected/a51e9ebc-665e-4ee1-bbef-935ff3835fbd-kube-api-access-cvrjp\") pod \"nova-cell1-cell-mapping-2gxgr\" (UID: \"a51e9ebc-665e-4ee1-bbef-935ff3835fbd\") " pod="openstack/nova-cell1-cell-mapping-2gxgr" Oct 03 08:59:25 crc kubenswrapper[4899]: I1003 08:59:25.573074 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a51e9ebc-665e-4ee1-bbef-935ff3835fbd-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-2gxgr\" (UID: \"a51e9ebc-665e-4ee1-bbef-935ff3835fbd\") " pod="openstack/nova-cell1-cell-mapping-2gxgr" Oct 03 08:59:25 crc kubenswrapper[4899]: I1003 08:59:25.574446 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a51e9ebc-665e-4ee1-bbef-935ff3835fbd-config-data\") pod \"nova-cell1-cell-mapping-2gxgr\" (UID: \"a51e9ebc-665e-4ee1-bbef-935ff3835fbd\") " pod="openstack/nova-cell1-cell-mapping-2gxgr" Oct 03 08:59:25 crc kubenswrapper[4899]: I1003 08:59:25.574490 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a51e9ebc-665e-4ee1-bbef-935ff3835fbd-scripts\") pod \"nova-cell1-cell-mapping-2gxgr\" (UID: \"a51e9ebc-665e-4ee1-bbef-935ff3835fbd\") " pod="openstack/nova-cell1-cell-mapping-2gxgr" Oct 03 08:59:25 crc kubenswrapper[4899]: I1003 08:59:25.584288 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvrjp\" (UniqueName: \"kubernetes.io/projected/a51e9ebc-665e-4ee1-bbef-935ff3835fbd-kube-api-access-cvrjp\") pod \"nova-cell1-cell-mapping-2gxgr\" (UID: \"a51e9ebc-665e-4ee1-bbef-935ff3835fbd\") " pod="openstack/nova-cell1-cell-mapping-2gxgr" Oct 03 08:59:25 crc kubenswrapper[4899]: I1003 08:59:25.727104 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-2gxgr" Oct 03 08:59:25 crc kubenswrapper[4899]: I1003 08:59:25.849621 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="6dc10dad-9e14-4f05-9519-d7c38d5a4ca6" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.196:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 08:59:25 crc kubenswrapper[4899]: I1003 08:59:25.849615 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="6dc10dad-9e14-4f05-9519-d7c38d5a4ca6" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.196:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 08:59:26 crc kubenswrapper[4899]: I1003 08:59:26.245444 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f81520b3-d279-4570-ab54-eea5d6e44ff5","Type":"ContainerStarted","Data":"d515a6c688c6036634603e5a2db6a8554b9d9f03e89c6313627b2ae81f91befb"} Oct 03 08:59:26 crc kubenswrapper[4899]: I1003 08:59:26.245725 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f81520b3-d279-4570-ab54-eea5d6e44ff5","Type":"ContainerStarted","Data":"df9e22ebed3a2415c0ada8d5ae2eddde0ca334861fdf595e04d4f5a759b06ed8"} Oct 03 08:59:26 crc kubenswrapper[4899]: I1003 08:59:26.252727 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0","Type":"ContainerStarted","Data":"d69631061f496fa6f6a2aab2753a9f3f7f275b91065f24cdf6d9ef45b79cd5ca"} Oct 03 08:59:26 crc kubenswrapper[4899]: I1003 08:59:26.275282 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-2gxgr"] Oct 03 08:59:26 crc kubenswrapper[4899]: I1003 08:59:26.285923 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.28587024 podStartE2EDuration="2.28587024s" podCreationTimestamp="2025-10-03 08:59:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:59:26.269523132 +0000 UTC m=+1140.377008085" watchObservedRunningTime="2025-10-03 08:59:26.28587024 +0000 UTC m=+1140.393355193" Oct 03 08:59:27 crc kubenswrapper[4899]: I1003 08:59:27.262210 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0","Type":"ContainerStarted","Data":"cb5e21a67eeb2d6b6fc4499868690aca99aa6bac2232d8d67d7ad94eed4a8441"} Oct 03 08:59:27 crc kubenswrapper[4899]: I1003 08:59:27.262582 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 03 08:59:27 crc kubenswrapper[4899]: I1003 08:59:27.266216 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-2gxgr" event={"ID":"a51e9ebc-665e-4ee1-bbef-935ff3835fbd","Type":"ContainerStarted","Data":"36e1f06c9f9ffb9e47277f3597b16a8d28da466b9ce08288d1f4951c2d705b1b"} Oct 03 08:59:27 crc kubenswrapper[4899]: I1003 08:59:27.266250 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-2gxgr" event={"ID":"a51e9ebc-665e-4ee1-bbef-935ff3835fbd","Type":"ContainerStarted","Data":"c02c999a817176d5a6f0ddc08d43e0327d1855099fba62b2644373beac48c4a5"} Oct 03 08:59:27 crc 
kubenswrapper[4899]: I1003 08:59:27.287115 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.817821143 podStartE2EDuration="5.287097706s" podCreationTimestamp="2025-10-03 08:59:22 +0000 UTC" firstStartedPulling="2025-10-03 08:59:23.299583392 +0000 UTC m=+1137.407068345" lastFinishedPulling="2025-10-03 08:59:26.768859955 +0000 UTC m=+1140.876344908" observedRunningTime="2025-10-03 08:59:27.280926861 +0000 UTC m=+1141.388411814" watchObservedRunningTime="2025-10-03 08:59:27.287097706 +0000 UTC m=+1141.394582659" Oct 03 08:59:27 crc kubenswrapper[4899]: I1003 08:59:27.306229 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-2gxgr" podStartSLOduration=2.306206991 podStartE2EDuration="2.306206991s" podCreationTimestamp="2025-10-03 08:59:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:59:27.296826584 +0000 UTC m=+1141.404311537" watchObservedRunningTime="2025-10-03 08:59:27.306206991 +0000 UTC m=+1141.413691944" Oct 03 08:59:27 crc kubenswrapper[4899]: I1003 08:59:27.705090 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" Oct 03 08:59:27 crc kubenswrapper[4899]: I1003 08:59:27.768849 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-672tv"] Oct 03 08:59:27 crc kubenswrapper[4899]: I1003 08:59:27.769136 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-bccf8f775-672tv" podUID="f81cc11b-ac94-4a5b-bd99-50f4580e5f14" containerName="dnsmasq-dns" containerID="cri-o://3d7f2ec9196dc1cb33179a8ef659e8b8362613f5d59296049139584d28e2a9a6" gracePeriod=10 Oct 03 08:59:28 crc kubenswrapper[4899]: I1003 08:59:28.286601 4899 generic.go:334] "Generic (PLEG): container finished" podID="f81cc11b-ac94-4a5b-bd99-50f4580e5f14" containerID="3d7f2ec9196dc1cb33179a8ef659e8b8362613f5d59296049139584d28e2a9a6" exitCode=0 Oct 03 08:59:28 crc kubenswrapper[4899]: I1003 08:59:28.286677 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-672tv" event={"ID":"f81cc11b-ac94-4a5b-bd99-50f4580e5f14","Type":"ContainerDied","Data":"3d7f2ec9196dc1cb33179a8ef659e8b8362613f5d59296049139584d28e2a9a6"} Oct 03 08:59:28 crc kubenswrapper[4899]: I1003 08:59:28.380793 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-672tv" Oct 03 08:59:28 crc kubenswrapper[4899]: I1003 08:59:28.551384 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-ovsdbserver-sb\") pod \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\" (UID: \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\") " Oct 03 08:59:28 crc kubenswrapper[4899]: I1003 08:59:28.551467 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-dns-swift-storage-0\") pod \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\" (UID: \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\") " Oct 03 08:59:28 crc kubenswrapper[4899]: I1003 08:59:28.551510 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-config\") pod \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\" (UID: \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\") " Oct 03 08:59:28 crc kubenswrapper[4899]: I1003 08:59:28.551562 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-dns-svc\") pod \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\" (UID: \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\") " Oct 03 08:59:28 crc kubenswrapper[4899]: I1003 08:59:28.551659 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dpxtr\" (UniqueName: \"kubernetes.io/projected/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-kube-api-access-dpxtr\") pod \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\" (UID: \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\") " Oct 03 08:59:28 crc kubenswrapper[4899]: I1003 08:59:28.551797 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-ovsdbserver-nb\") pod \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\" (UID: \"f81cc11b-ac94-4a5b-bd99-50f4580e5f14\") " Oct 03 08:59:28 crc kubenswrapper[4899]: I1003 08:59:28.576637 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-kube-api-access-dpxtr" (OuterVolumeSpecName: "kube-api-access-dpxtr") pod "f81cc11b-ac94-4a5b-bd99-50f4580e5f14" (UID: "f81cc11b-ac94-4a5b-bd99-50f4580e5f14"). InnerVolumeSpecName "kube-api-access-dpxtr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:59:28 crc kubenswrapper[4899]: I1003 08:59:28.602319 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-config" (OuterVolumeSpecName: "config") pod "f81cc11b-ac94-4a5b-bd99-50f4580e5f14" (UID: "f81cc11b-ac94-4a5b-bd99-50f4580e5f14"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:59:28 crc kubenswrapper[4899]: I1003 08:59:28.607579 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f81cc11b-ac94-4a5b-bd99-50f4580e5f14" (UID: "f81cc11b-ac94-4a5b-bd99-50f4580e5f14"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:59:28 crc kubenswrapper[4899]: I1003 08:59:28.612943 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f81cc11b-ac94-4a5b-bd99-50f4580e5f14" (UID: "f81cc11b-ac94-4a5b-bd99-50f4580e5f14"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:59:28 crc kubenswrapper[4899]: I1003 08:59:28.623519 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f81cc11b-ac94-4a5b-bd99-50f4580e5f14" (UID: "f81cc11b-ac94-4a5b-bd99-50f4580e5f14"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:59:28 crc kubenswrapper[4899]: I1003 08:59:28.629365 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f81cc11b-ac94-4a5b-bd99-50f4580e5f14" (UID: "f81cc11b-ac94-4a5b-bd99-50f4580e5f14"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 08:59:28 crc kubenswrapper[4899]: I1003 08:59:28.654190 4899 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:28 crc kubenswrapper[4899]: I1003 08:59:28.654217 4899 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:28 crc kubenswrapper[4899]: I1003 08:59:28.654227 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-config\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:28 crc kubenswrapper[4899]: I1003 08:59:28.654237 4899 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:28 crc kubenswrapper[4899]: I1003 08:59:28.654245 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dpxtr\" (UniqueName: \"kubernetes.io/projected/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-kube-api-access-dpxtr\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:28 crc kubenswrapper[4899]: I1003 08:59:28.654254 4899 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f81cc11b-ac94-4a5b-bd99-50f4580e5f14-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:29 crc kubenswrapper[4899]: I1003 08:59:29.299458 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-672tv" event={"ID":"f81cc11b-ac94-4a5b-bd99-50f4580e5f14","Type":"ContainerDied","Data":"a3d523dd4673d24e67dd40990d003fc8b207b19b9f242e39892019f380d2a33b"} Oct 03 08:59:29 crc kubenswrapper[4899]: I1003 08:59:29.299522 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-672tv" Oct 03 08:59:29 crc kubenswrapper[4899]: I1003 08:59:29.299883 4899 scope.go:117] "RemoveContainer" containerID="3d7f2ec9196dc1cb33179a8ef659e8b8362613f5d59296049139584d28e2a9a6" Oct 03 08:59:29 crc kubenswrapper[4899]: I1003 08:59:29.335098 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-672tv"] Oct 03 08:59:29 crc kubenswrapper[4899]: I1003 08:59:29.337286 4899 scope.go:117] "RemoveContainer" containerID="4503ef5a4a8033776f9afa6b352cd9607a1809b24f6cd3d2e3321a9a604a9b29" Oct 03 08:59:29 crc kubenswrapper[4899]: I1003 08:59:29.346904 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-672tv"] Oct 03 08:59:30 crc kubenswrapper[4899]: I1003 08:59:30.537771 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f81cc11b-ac94-4a5b-bd99-50f4580e5f14" path="/var/lib/kubelet/pods/f81cc11b-ac94-4a5b-bd99-50f4580e5f14/volumes" Oct 03 08:59:32 crc kubenswrapper[4899]: I1003 08:59:32.354779 4899 generic.go:334] "Generic (PLEG): container finished" podID="a51e9ebc-665e-4ee1-bbef-935ff3835fbd" containerID="36e1f06c9f9ffb9e47277f3597b16a8d28da466b9ce08288d1f4951c2d705b1b" exitCode=0 Oct 03 08:59:32 crc kubenswrapper[4899]: I1003 08:59:32.355136 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-2gxgr" event={"ID":"a51e9ebc-665e-4ee1-bbef-935ff3835fbd","Type":"ContainerDied","Data":"36e1f06c9f9ffb9e47277f3597b16a8d28da466b9ce08288d1f4951c2d705b1b"} Oct 03 08:59:33 crc kubenswrapper[4899]: I1003 08:59:33.684847 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-2gxgr" Oct 03 08:59:33 crc kubenswrapper[4899]: I1003 08:59:33.873296 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvrjp\" (UniqueName: \"kubernetes.io/projected/a51e9ebc-665e-4ee1-bbef-935ff3835fbd-kube-api-access-cvrjp\") pod \"a51e9ebc-665e-4ee1-bbef-935ff3835fbd\" (UID: \"a51e9ebc-665e-4ee1-bbef-935ff3835fbd\") " Oct 03 08:59:33 crc kubenswrapper[4899]: I1003 08:59:33.873434 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a51e9ebc-665e-4ee1-bbef-935ff3835fbd-config-data\") pod \"a51e9ebc-665e-4ee1-bbef-935ff3835fbd\" (UID: \"a51e9ebc-665e-4ee1-bbef-935ff3835fbd\") " Oct 03 08:59:33 crc kubenswrapper[4899]: I1003 08:59:33.873469 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a51e9ebc-665e-4ee1-bbef-935ff3835fbd-combined-ca-bundle\") pod \"a51e9ebc-665e-4ee1-bbef-935ff3835fbd\" (UID: \"a51e9ebc-665e-4ee1-bbef-935ff3835fbd\") " Oct 03 08:59:33 crc kubenswrapper[4899]: I1003 08:59:33.873550 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a51e9ebc-665e-4ee1-bbef-935ff3835fbd-scripts\") pod \"a51e9ebc-665e-4ee1-bbef-935ff3835fbd\" (UID: \"a51e9ebc-665e-4ee1-bbef-935ff3835fbd\") " Oct 03 08:59:33 crc kubenswrapper[4899]: I1003 08:59:33.879949 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a51e9ebc-665e-4ee1-bbef-935ff3835fbd-scripts" (OuterVolumeSpecName: "scripts") pod "a51e9ebc-665e-4ee1-bbef-935ff3835fbd" (UID: "a51e9ebc-665e-4ee1-bbef-935ff3835fbd"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:59:33 crc kubenswrapper[4899]: I1003 08:59:33.881090 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a51e9ebc-665e-4ee1-bbef-935ff3835fbd-kube-api-access-cvrjp" (OuterVolumeSpecName: "kube-api-access-cvrjp") pod "a51e9ebc-665e-4ee1-bbef-935ff3835fbd" (UID: "a51e9ebc-665e-4ee1-bbef-935ff3835fbd"). InnerVolumeSpecName "kube-api-access-cvrjp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:59:33 crc kubenswrapper[4899]: I1003 08:59:33.902431 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a51e9ebc-665e-4ee1-bbef-935ff3835fbd-config-data" (OuterVolumeSpecName: "config-data") pod "a51e9ebc-665e-4ee1-bbef-935ff3835fbd" (UID: "a51e9ebc-665e-4ee1-bbef-935ff3835fbd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:59:33 crc kubenswrapper[4899]: I1003 08:59:33.902476 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a51e9ebc-665e-4ee1-bbef-935ff3835fbd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a51e9ebc-665e-4ee1-bbef-935ff3835fbd" (UID: "a51e9ebc-665e-4ee1-bbef-935ff3835fbd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:59:33 crc kubenswrapper[4899]: I1003 08:59:33.977029 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvrjp\" (UniqueName: \"kubernetes.io/projected/a51e9ebc-665e-4ee1-bbef-935ff3835fbd-kube-api-access-cvrjp\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:33 crc kubenswrapper[4899]: I1003 08:59:33.977062 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a51e9ebc-665e-4ee1-bbef-935ff3835fbd-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:33 crc kubenswrapper[4899]: I1003 08:59:33.977071 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a51e9ebc-665e-4ee1-bbef-935ff3835fbd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:33 crc kubenswrapper[4899]: I1003 08:59:33.977079 4899 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a51e9ebc-665e-4ee1-bbef-935ff3835fbd-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:34 crc kubenswrapper[4899]: I1003 08:59:34.372391 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-2gxgr" event={"ID":"a51e9ebc-665e-4ee1-bbef-935ff3835fbd","Type":"ContainerDied","Data":"c02c999a817176d5a6f0ddc08d43e0327d1855099fba62b2644373beac48c4a5"} Oct 03 08:59:34 crc kubenswrapper[4899]: I1003 08:59:34.372436 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c02c999a817176d5a6f0ddc08d43e0327d1855099fba62b2644373beac48c4a5" Oct 03 08:59:34 crc kubenswrapper[4899]: I1003 08:59:34.372531 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-2gxgr" Oct 03 08:59:34 crc kubenswrapper[4899]: I1003 08:59:34.552812 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 08:59:34 crc kubenswrapper[4899]: I1003 08:59:34.553096 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f81520b3-d279-4570-ab54-eea5d6e44ff5" containerName="nova-api-log" containerID="cri-o://df9e22ebed3a2415c0ada8d5ae2eddde0ca334861fdf595e04d4f5a759b06ed8" gracePeriod=30 Oct 03 08:59:34 crc kubenswrapper[4899]: I1003 08:59:34.553215 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f81520b3-d279-4570-ab54-eea5d6e44ff5" containerName="nova-api-api" containerID="cri-o://d515a6c688c6036634603e5a2db6a8554b9d9f03e89c6313627b2ae81f91befb" gracePeriod=30 Oct 03 08:59:34 crc kubenswrapper[4899]: I1003 08:59:34.575872 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 08:59:34 crc kubenswrapper[4899]: I1003 08:59:34.576493 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="b3ab645e-4533-4b5f-851f-5a8b09c6dda3" containerName="nova-scheduler-scheduler" containerID="cri-o://5c2c2bf6d0ed04aa16524f3855c7ab578c8cd5e352a691f3ee0d7ce71472e469" gracePeriod=30 Oct 03 08:59:34 crc kubenswrapper[4899]: I1003 08:59:34.586618 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 08:59:34 crc kubenswrapper[4899]: I1003 08:59:34.586928 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6dc10dad-9e14-4f05-9519-d7c38d5a4ca6" containerName="nova-metadata-log" containerID="cri-o://260295fae2fc28336557bc3d1e63ffc8c8ef713ea00b862a8b4e2d4d57d351f1" gracePeriod=30 Oct 03 08:59:34 crc kubenswrapper[4899]: I1003 08:59:34.587013 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6dc10dad-9e14-4f05-9519-d7c38d5a4ca6" containerName="nova-metadata-metadata" containerID="cri-o://0e203adc17a136b60fad2665a004dd0cb3c96d39e92b3d66228f7e4b1ce810e9" gracePeriod=30 Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.171086 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.300222 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f81520b3-d279-4570-ab54-eea5d6e44ff5-combined-ca-bundle\") pod \"f81520b3-d279-4570-ab54-eea5d6e44ff5\" (UID: \"f81520b3-d279-4570-ab54-eea5d6e44ff5\") " Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.300278 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f81520b3-d279-4570-ab54-eea5d6e44ff5-logs\") pod \"f81520b3-d279-4570-ab54-eea5d6e44ff5\" (UID: \"f81520b3-d279-4570-ab54-eea5d6e44ff5\") " Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.300303 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v6gvj\" (UniqueName: \"kubernetes.io/projected/f81520b3-d279-4570-ab54-eea5d6e44ff5-kube-api-access-v6gvj\") pod \"f81520b3-d279-4570-ab54-eea5d6e44ff5\" (UID: \"f81520b3-d279-4570-ab54-eea5d6e44ff5\") " Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.300373 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f81520b3-d279-4570-ab54-eea5d6e44ff5-public-tls-certs\") pod \"f81520b3-d279-4570-ab54-eea5d6e44ff5\" (UID: \"f81520b3-d279-4570-ab54-eea5d6e44ff5\") " Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.300523 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f81520b3-d279-4570-ab54-eea5d6e44ff5-internal-tls-certs\") pod \"f81520b3-d279-4570-ab54-eea5d6e44ff5\" (UID: \"f81520b3-d279-4570-ab54-eea5d6e44ff5\") " Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.300599 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f81520b3-d279-4570-ab54-eea5d6e44ff5-config-data\") pod \"f81520b3-d279-4570-ab54-eea5d6e44ff5\" (UID: \"f81520b3-d279-4570-ab54-eea5d6e44ff5\") " Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.302061 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f81520b3-d279-4570-ab54-eea5d6e44ff5-logs" (OuterVolumeSpecName: "logs") pod "f81520b3-d279-4570-ab54-eea5d6e44ff5" (UID: "f81520b3-d279-4570-ab54-eea5d6e44ff5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.305987 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f81520b3-d279-4570-ab54-eea5d6e44ff5-kube-api-access-v6gvj" (OuterVolumeSpecName: "kube-api-access-v6gvj") pod "f81520b3-d279-4570-ab54-eea5d6e44ff5" (UID: "f81520b3-d279-4570-ab54-eea5d6e44ff5"). InnerVolumeSpecName "kube-api-access-v6gvj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.333317 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f81520b3-d279-4570-ab54-eea5d6e44ff5-config-data" (OuterVolumeSpecName: "config-data") pod "f81520b3-d279-4570-ab54-eea5d6e44ff5" (UID: "f81520b3-d279-4570-ab54-eea5d6e44ff5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.334155 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f81520b3-d279-4570-ab54-eea5d6e44ff5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f81520b3-d279-4570-ab54-eea5d6e44ff5" (UID: "f81520b3-d279-4570-ab54-eea5d6e44ff5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.355677 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f81520b3-d279-4570-ab54-eea5d6e44ff5-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "f81520b3-d279-4570-ab54-eea5d6e44ff5" (UID: "f81520b3-d279-4570-ab54-eea5d6e44ff5"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.365730 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f81520b3-d279-4570-ab54-eea5d6e44ff5-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "f81520b3-d279-4570-ab54-eea5d6e44ff5" (UID: "f81520b3-d279-4570-ab54-eea5d6e44ff5"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.386394 4899 generic.go:334] "Generic (PLEG): container finished" podID="f81520b3-d279-4570-ab54-eea5d6e44ff5" containerID="d515a6c688c6036634603e5a2db6a8554b9d9f03e89c6313627b2ae81f91befb" exitCode=0 Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.386431 4899 generic.go:334] "Generic (PLEG): container finished" podID="f81520b3-d279-4570-ab54-eea5d6e44ff5" containerID="df9e22ebed3a2415c0ada8d5ae2eddde0ca334861fdf595e04d4f5a759b06ed8" exitCode=143 Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.386461 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f81520b3-d279-4570-ab54-eea5d6e44ff5","Type":"ContainerDied","Data":"d515a6c688c6036634603e5a2db6a8554b9d9f03e89c6313627b2ae81f91befb"} Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.386522 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f81520b3-d279-4570-ab54-eea5d6e44ff5","Type":"ContainerDied","Data":"df9e22ebed3a2415c0ada8d5ae2eddde0ca334861fdf595e04d4f5a759b06ed8"} Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.386539 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f81520b3-d279-4570-ab54-eea5d6e44ff5","Type":"ContainerDied","Data":"b868da5444b357167c096ab3d9b8ebad9021b9c55c19156ae95150691ba1d527"} Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.386475 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.386558 4899 scope.go:117] "RemoveContainer" containerID="d515a6c688c6036634603e5a2db6a8554b9d9f03e89c6313627b2ae81f91befb" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.388502 4899 generic.go:334] "Generic (PLEG): container finished" podID="6dc10dad-9e14-4f05-9519-d7c38d5a4ca6" containerID="260295fae2fc28336557bc3d1e63ffc8c8ef713ea00b862a8b4e2d4d57d351f1" exitCode=143 Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.388550 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6","Type":"ContainerDied","Data":"260295fae2fc28336557bc3d1e63ffc8c8ef713ea00b862a8b4e2d4d57d351f1"} Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.392037 4899 generic.go:334] "Generic (PLEG): container finished" podID="b3ab645e-4533-4b5f-851f-5a8b09c6dda3" containerID="5c2c2bf6d0ed04aa16524f3855c7ab578c8cd5e352a691f3ee0d7ce71472e469" exitCode=0 Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.392077 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b3ab645e-4533-4b5f-851f-5a8b09c6dda3","Type":"ContainerDied","Data":"5c2c2bf6d0ed04aa16524f3855c7ab578c8cd5e352a691f3ee0d7ce71472e469"} Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.402863 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f81520b3-d279-4570-ab54-eea5d6e44ff5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.402911 4899 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f81520b3-d279-4570-ab54-eea5d6e44ff5-logs\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.402920 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v6gvj\" (UniqueName: \"kubernetes.io/projected/f81520b3-d279-4570-ab54-eea5d6e44ff5-kube-api-access-v6gvj\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.402930 4899 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f81520b3-d279-4570-ab54-eea5d6e44ff5-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.402938 4899 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f81520b3-d279-4570-ab54-eea5d6e44ff5-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.402946 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f81520b3-d279-4570-ab54-eea5d6e44ff5-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.424955 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.428284 4899 scope.go:117] "RemoveContainer" containerID="df9e22ebed3a2415c0ada8d5ae2eddde0ca334861fdf595e04d4f5a759b06ed8" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.440611 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.468406 4899 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/nova-api-0"] Oct 03 08:59:35 crc kubenswrapper[4899]: E1003 08:59:35.468811 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a51e9ebc-665e-4ee1-bbef-935ff3835fbd" containerName="nova-manage" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.468826 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="a51e9ebc-665e-4ee1-bbef-935ff3835fbd" containerName="nova-manage" Oct 03 08:59:35 crc kubenswrapper[4899]: E1003 08:59:35.468853 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f81520b3-d279-4570-ab54-eea5d6e44ff5" containerName="nova-api-log" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.468859 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="f81520b3-d279-4570-ab54-eea5d6e44ff5" containerName="nova-api-log" Oct 03 08:59:35 crc kubenswrapper[4899]: E1003 08:59:35.468875 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f81520b3-d279-4570-ab54-eea5d6e44ff5" containerName="nova-api-api" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.468881 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="f81520b3-d279-4570-ab54-eea5d6e44ff5" containerName="nova-api-api" Oct 03 08:59:35 crc kubenswrapper[4899]: E1003 08:59:35.468911 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f81cc11b-ac94-4a5b-bd99-50f4580e5f14" containerName="init" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.468917 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="f81cc11b-ac94-4a5b-bd99-50f4580e5f14" containerName="init" Oct 03 08:59:35 crc kubenswrapper[4899]: E1003 08:59:35.468935 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f81cc11b-ac94-4a5b-bd99-50f4580e5f14" containerName="dnsmasq-dns" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.468942 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="f81cc11b-ac94-4a5b-bd99-50f4580e5f14" containerName="dnsmasq-dns" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.469105 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="f81520b3-d279-4570-ab54-eea5d6e44ff5" containerName="nova-api-log" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.469118 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="f81520b3-d279-4570-ab54-eea5d6e44ff5" containerName="nova-api-api" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.469126 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="f81cc11b-ac94-4a5b-bd99-50f4580e5f14" containerName="dnsmasq-dns" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.469189 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="a51e9ebc-665e-4ee1-bbef-935ff3835fbd" containerName="nova-manage" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.470356 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.473080 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.473216 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.473366 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.480826 4899 scope.go:117] "RemoveContainer" containerID="d515a6c688c6036634603e5a2db6a8554b9d9f03e89c6313627b2ae81f91befb" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.482243 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 08:59:35 crc kubenswrapper[4899]: E1003 08:59:35.487635 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d515a6c688c6036634603e5a2db6a8554b9d9f03e89c6313627b2ae81f91befb\": container with ID starting with d515a6c688c6036634603e5a2db6a8554b9d9f03e89c6313627b2ae81f91befb not found: ID does not exist" containerID="d515a6c688c6036634603e5a2db6a8554b9d9f03e89c6313627b2ae81f91befb" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.487687 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d515a6c688c6036634603e5a2db6a8554b9d9f03e89c6313627b2ae81f91befb"} err="failed to get container status \"d515a6c688c6036634603e5a2db6a8554b9d9f03e89c6313627b2ae81f91befb\": rpc error: code = NotFound desc = could not find container \"d515a6c688c6036634603e5a2db6a8554b9d9f03e89c6313627b2ae81f91befb\": container with ID starting with d515a6c688c6036634603e5a2db6a8554b9d9f03e89c6313627b2ae81f91befb not found: ID does not exist" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.487717 4899 scope.go:117] "RemoveContainer" containerID="df9e22ebed3a2415c0ada8d5ae2eddde0ca334861fdf595e04d4f5a759b06ed8" Oct 03 08:59:35 crc kubenswrapper[4899]: E1003 08:59:35.496356 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df9e22ebed3a2415c0ada8d5ae2eddde0ca334861fdf595e04d4f5a759b06ed8\": container with ID starting with df9e22ebed3a2415c0ada8d5ae2eddde0ca334861fdf595e04d4f5a759b06ed8 not found: ID does not exist" containerID="df9e22ebed3a2415c0ada8d5ae2eddde0ca334861fdf595e04d4f5a759b06ed8" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.496405 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df9e22ebed3a2415c0ada8d5ae2eddde0ca334861fdf595e04d4f5a759b06ed8"} err="failed to get container status \"df9e22ebed3a2415c0ada8d5ae2eddde0ca334861fdf595e04d4f5a759b06ed8\": rpc error: code = NotFound desc = could not find container \"df9e22ebed3a2415c0ada8d5ae2eddde0ca334861fdf595e04d4f5a759b06ed8\": container with ID starting with df9e22ebed3a2415c0ada8d5ae2eddde0ca334861fdf595e04d4f5a759b06ed8 not found: ID does not exist" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.496428 4899 scope.go:117] "RemoveContainer" containerID="d515a6c688c6036634603e5a2db6a8554b9d9f03e89c6313627b2ae81f91befb" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.496874 4899 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"d515a6c688c6036634603e5a2db6a8554b9d9f03e89c6313627b2ae81f91befb"} err="failed to get container status \"d515a6c688c6036634603e5a2db6a8554b9d9f03e89c6313627b2ae81f91befb\": rpc error: code = NotFound desc = could not find container \"d515a6c688c6036634603e5a2db6a8554b9d9f03e89c6313627b2ae81f91befb\": container with ID starting with d515a6c688c6036634603e5a2db6a8554b9d9f03e89c6313627b2ae81f91befb not found: ID does not exist" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.496911 4899 scope.go:117] "RemoveContainer" containerID="df9e22ebed3a2415c0ada8d5ae2eddde0ca334861fdf595e04d4f5a759b06ed8" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.497118 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df9e22ebed3a2415c0ada8d5ae2eddde0ca334861fdf595e04d4f5a759b06ed8"} err="failed to get container status \"df9e22ebed3a2415c0ada8d5ae2eddde0ca334861fdf595e04d4f5a759b06ed8\": rpc error: code = NotFound desc = could not find container \"df9e22ebed3a2415c0ada8d5ae2eddde0ca334861fdf595e04d4f5a759b06ed8\": container with ID starting with df9e22ebed3a2415c0ada8d5ae2eddde0ca334861fdf595e04d4f5a759b06ed8 not found: ID does not exist" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.503725 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76c142d4-6700-4120-bd50-aaf4e1b8d5b8-config-data\") pod \"nova-api-0\" (UID: \"76c142d4-6700-4120-bd50-aaf4e1b8d5b8\") " pod="openstack/nova-api-0" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.503962 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/76c142d4-6700-4120-bd50-aaf4e1b8d5b8-internal-tls-certs\") pod \"nova-api-0\" (UID: \"76c142d4-6700-4120-bd50-aaf4e1b8d5b8\") " pod="openstack/nova-api-0" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.504068 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76c142d4-6700-4120-bd50-aaf4e1b8d5b8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"76c142d4-6700-4120-bd50-aaf4e1b8d5b8\") " pod="openstack/nova-api-0" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.504119 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59tr7\" (UniqueName: \"kubernetes.io/projected/76c142d4-6700-4120-bd50-aaf4e1b8d5b8-kube-api-access-59tr7\") pod \"nova-api-0\" (UID: \"76c142d4-6700-4120-bd50-aaf4e1b8d5b8\") " pod="openstack/nova-api-0" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.504137 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/76c142d4-6700-4120-bd50-aaf4e1b8d5b8-public-tls-certs\") pod \"nova-api-0\" (UID: \"76c142d4-6700-4120-bd50-aaf4e1b8d5b8\") " pod="openstack/nova-api-0" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.504223 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/76c142d4-6700-4120-bd50-aaf4e1b8d5b8-logs\") pod \"nova-api-0\" (UID: \"76c142d4-6700-4120-bd50-aaf4e1b8d5b8\") " pod="openstack/nova-api-0" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.606506 4899 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/76c142d4-6700-4120-bd50-aaf4e1b8d5b8-internal-tls-certs\") pod \"nova-api-0\" (UID: \"76c142d4-6700-4120-bd50-aaf4e1b8d5b8\") " pod="openstack/nova-api-0" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.606549 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76c142d4-6700-4120-bd50-aaf4e1b8d5b8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"76c142d4-6700-4120-bd50-aaf4e1b8d5b8\") " pod="openstack/nova-api-0" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.606619 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59tr7\" (UniqueName: \"kubernetes.io/projected/76c142d4-6700-4120-bd50-aaf4e1b8d5b8-kube-api-access-59tr7\") pod \"nova-api-0\" (UID: \"76c142d4-6700-4120-bd50-aaf4e1b8d5b8\") " pod="openstack/nova-api-0" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.606636 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/76c142d4-6700-4120-bd50-aaf4e1b8d5b8-public-tls-certs\") pod \"nova-api-0\" (UID: \"76c142d4-6700-4120-bd50-aaf4e1b8d5b8\") " pod="openstack/nova-api-0" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.606660 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/76c142d4-6700-4120-bd50-aaf4e1b8d5b8-logs\") pod \"nova-api-0\" (UID: \"76c142d4-6700-4120-bd50-aaf4e1b8d5b8\") " pod="openstack/nova-api-0" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.606720 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76c142d4-6700-4120-bd50-aaf4e1b8d5b8-config-data\") pod \"nova-api-0\" (UID: \"76c142d4-6700-4120-bd50-aaf4e1b8d5b8\") " pod="openstack/nova-api-0" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.609592 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/76c142d4-6700-4120-bd50-aaf4e1b8d5b8-logs\") pod \"nova-api-0\" (UID: \"76c142d4-6700-4120-bd50-aaf4e1b8d5b8\") " pod="openstack/nova-api-0" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.611324 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76c142d4-6700-4120-bd50-aaf4e1b8d5b8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"76c142d4-6700-4120-bd50-aaf4e1b8d5b8\") " pod="openstack/nova-api-0" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.611467 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/76c142d4-6700-4120-bd50-aaf4e1b8d5b8-internal-tls-certs\") pod \"nova-api-0\" (UID: \"76c142d4-6700-4120-bd50-aaf4e1b8d5b8\") " pod="openstack/nova-api-0" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.612250 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76c142d4-6700-4120-bd50-aaf4e1b8d5b8-config-data\") pod \"nova-api-0\" (UID: \"76c142d4-6700-4120-bd50-aaf4e1b8d5b8\") " pod="openstack/nova-api-0" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.613282 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/76c142d4-6700-4120-bd50-aaf4e1b8d5b8-public-tls-certs\") pod \"nova-api-0\" (UID: \"76c142d4-6700-4120-bd50-aaf4e1b8d5b8\") " pod="openstack/nova-api-0" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.625458 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59tr7\" (UniqueName: \"kubernetes.io/projected/76c142d4-6700-4120-bd50-aaf4e1b8d5b8-kube-api-access-59tr7\") pod \"nova-api-0\" (UID: \"76c142d4-6700-4120-bd50-aaf4e1b8d5b8\") " pod="openstack/nova-api-0" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.638638 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.708063 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgdpk\" (UniqueName: \"kubernetes.io/projected/b3ab645e-4533-4b5f-851f-5a8b09c6dda3-kube-api-access-sgdpk\") pod \"b3ab645e-4533-4b5f-851f-5a8b09c6dda3\" (UID: \"b3ab645e-4533-4b5f-851f-5a8b09c6dda3\") " Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.708128 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3ab645e-4533-4b5f-851f-5a8b09c6dda3-config-data\") pod \"b3ab645e-4533-4b5f-851f-5a8b09c6dda3\" (UID: \"b3ab645e-4533-4b5f-851f-5a8b09c6dda3\") " Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.708298 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3ab645e-4533-4b5f-851f-5a8b09c6dda3-combined-ca-bundle\") pod \"b3ab645e-4533-4b5f-851f-5a8b09c6dda3\" (UID: \"b3ab645e-4533-4b5f-851f-5a8b09c6dda3\") " Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.713090 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3ab645e-4533-4b5f-851f-5a8b09c6dda3-kube-api-access-sgdpk" (OuterVolumeSpecName: "kube-api-access-sgdpk") pod "b3ab645e-4533-4b5f-851f-5a8b09c6dda3" (UID: "b3ab645e-4533-4b5f-851f-5a8b09c6dda3"). InnerVolumeSpecName "kube-api-access-sgdpk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.736028 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3ab645e-4533-4b5f-851f-5a8b09c6dda3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b3ab645e-4533-4b5f-851f-5a8b09c6dda3" (UID: "b3ab645e-4533-4b5f-851f-5a8b09c6dda3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.738824 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3ab645e-4533-4b5f-851f-5a8b09c6dda3-config-data" (OuterVolumeSpecName: "config-data") pod "b3ab645e-4533-4b5f-851f-5a8b09c6dda3" (UID: "b3ab645e-4533-4b5f-851f-5a8b09c6dda3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.801800 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.826264 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3ab645e-4533-4b5f-851f-5a8b09c6dda3-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.826362 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3ab645e-4533-4b5f-851f-5a8b09c6dda3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:35 crc kubenswrapper[4899]: I1003 08:59:35.826396 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgdpk\" (UniqueName: \"kubernetes.io/projected/b3ab645e-4533-4b5f-851f-5a8b09c6dda3-kube-api-access-sgdpk\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:36 crc kubenswrapper[4899]: I1003 08:59:36.241509 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 08:59:36 crc kubenswrapper[4899]: W1003 08:59:36.244608 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod76c142d4_6700_4120_bd50_aaf4e1b8d5b8.slice/crio-ea0e71cdd1232bf8df1d3bed269b8552b8dbdc1d714612335eb047164ecff54e WatchSource:0}: Error finding container ea0e71cdd1232bf8df1d3bed269b8552b8dbdc1d714612335eb047164ecff54e: Status 404 returned error can't find the container with id ea0e71cdd1232bf8df1d3bed269b8552b8dbdc1d714612335eb047164ecff54e Oct 03 08:59:36 crc kubenswrapper[4899]: I1003 08:59:36.404438 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"76c142d4-6700-4120-bd50-aaf4e1b8d5b8","Type":"ContainerStarted","Data":"064e3184d913a81fc289b638f50e4e64bfa4325b04a1a04903e0a27af8d3e04e"} Oct 03 08:59:36 crc kubenswrapper[4899]: I1003 08:59:36.404488 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"76c142d4-6700-4120-bd50-aaf4e1b8d5b8","Type":"ContainerStarted","Data":"ea0e71cdd1232bf8df1d3bed269b8552b8dbdc1d714612335eb047164ecff54e"} Oct 03 08:59:36 crc kubenswrapper[4899]: I1003 08:59:36.406169 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b3ab645e-4533-4b5f-851f-5a8b09c6dda3","Type":"ContainerDied","Data":"97187cbf9c95e31b16fdfda3a9c24b9b63c4c3f1f270723f8766fdbe8bc18f00"} Oct 03 08:59:36 crc kubenswrapper[4899]: I1003 08:59:36.406225 4899 scope.go:117] "RemoveContainer" containerID="5c2c2bf6d0ed04aa16524f3855c7ab578c8cd5e352a691f3ee0d7ce71472e469" Oct 03 08:59:36 crc kubenswrapper[4899]: I1003 08:59:36.406246 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 08:59:36 crc kubenswrapper[4899]: I1003 08:59:36.447746 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 08:59:36 crc kubenswrapper[4899]: I1003 08:59:36.458041 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 08:59:36 crc kubenswrapper[4899]: I1003 08:59:36.472787 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 08:59:36 crc kubenswrapper[4899]: E1003 08:59:36.473415 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3ab645e-4533-4b5f-851f-5a8b09c6dda3" containerName="nova-scheduler-scheduler" Oct 03 08:59:36 crc kubenswrapper[4899]: I1003 08:59:36.473439 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3ab645e-4533-4b5f-851f-5a8b09c6dda3" containerName="nova-scheduler-scheduler" Oct 03 08:59:36 crc kubenswrapper[4899]: I1003 08:59:36.473647 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3ab645e-4533-4b5f-851f-5a8b09c6dda3" containerName="nova-scheduler-scheduler" Oct 03 08:59:36 crc kubenswrapper[4899]: I1003 08:59:36.474389 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 08:59:36 crc kubenswrapper[4899]: I1003 08:59:36.476934 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 03 08:59:36 crc kubenswrapper[4899]: I1003 08:59:36.484566 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 08:59:36 crc kubenswrapper[4899]: I1003 08:59:36.553552 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3ab645e-4533-4b5f-851f-5a8b09c6dda3" path="/var/lib/kubelet/pods/b3ab645e-4533-4b5f-851f-5a8b09c6dda3/volumes" Oct 03 08:59:36 crc kubenswrapper[4899]: I1003 08:59:36.554335 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f81520b3-d279-4570-ab54-eea5d6e44ff5" path="/var/lib/kubelet/pods/f81520b3-d279-4570-ab54-eea5d6e44ff5/volumes" Oct 03 08:59:36 crc kubenswrapper[4899]: I1003 08:59:36.640234 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6969051c-bc07-454c-b958-b9e203f95ee5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"6969051c-bc07-454c-b958-b9e203f95ee5\") " pod="openstack/nova-scheduler-0" Oct 03 08:59:36 crc kubenswrapper[4899]: I1003 08:59:36.640322 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6969051c-bc07-454c-b958-b9e203f95ee5-config-data\") pod \"nova-scheduler-0\" (UID: \"6969051c-bc07-454c-b958-b9e203f95ee5\") " pod="openstack/nova-scheduler-0" Oct 03 08:59:36 crc kubenswrapper[4899]: I1003 08:59:36.640716 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdlcg\" (UniqueName: \"kubernetes.io/projected/6969051c-bc07-454c-b958-b9e203f95ee5-kube-api-access-hdlcg\") pod \"nova-scheduler-0\" (UID: \"6969051c-bc07-454c-b958-b9e203f95ee5\") " pod="openstack/nova-scheduler-0" Oct 03 08:59:36 crc kubenswrapper[4899]: I1003 08:59:36.742598 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/6969051c-bc07-454c-b958-b9e203f95ee5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"6969051c-bc07-454c-b958-b9e203f95ee5\") " pod="openstack/nova-scheduler-0" Oct 03 08:59:36 crc kubenswrapper[4899]: I1003 08:59:36.742680 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6969051c-bc07-454c-b958-b9e203f95ee5-config-data\") pod \"nova-scheduler-0\" (UID: \"6969051c-bc07-454c-b958-b9e203f95ee5\") " pod="openstack/nova-scheduler-0" Oct 03 08:59:36 crc kubenswrapper[4899]: I1003 08:59:36.742764 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdlcg\" (UniqueName: \"kubernetes.io/projected/6969051c-bc07-454c-b958-b9e203f95ee5-kube-api-access-hdlcg\") pod \"nova-scheduler-0\" (UID: \"6969051c-bc07-454c-b958-b9e203f95ee5\") " pod="openstack/nova-scheduler-0" Oct 03 08:59:36 crc kubenswrapper[4899]: I1003 08:59:36.747734 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6969051c-bc07-454c-b958-b9e203f95ee5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"6969051c-bc07-454c-b958-b9e203f95ee5\") " pod="openstack/nova-scheduler-0" Oct 03 08:59:36 crc kubenswrapper[4899]: I1003 08:59:36.748623 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6969051c-bc07-454c-b958-b9e203f95ee5-config-data\") pod \"nova-scheduler-0\" (UID: \"6969051c-bc07-454c-b958-b9e203f95ee5\") " pod="openstack/nova-scheduler-0" Oct 03 08:59:36 crc kubenswrapper[4899]: I1003 08:59:36.762284 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdlcg\" (UniqueName: \"kubernetes.io/projected/6969051c-bc07-454c-b958-b9e203f95ee5-kube-api-access-hdlcg\") pod \"nova-scheduler-0\" (UID: \"6969051c-bc07-454c-b958-b9e203f95ee5\") " pod="openstack/nova-scheduler-0" Oct 03 08:59:36 crc kubenswrapper[4899]: I1003 08:59:36.809567 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 08:59:37 crc kubenswrapper[4899]: I1003 08:59:37.225588 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 08:59:37 crc kubenswrapper[4899]: W1003 08:59:37.229249 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6969051c_bc07_454c_b958_b9e203f95ee5.slice/crio-dfa4e6abfbea18ca2d71a2af128306cd11e971254d2b98ff7b22689a6f48812e WatchSource:0}: Error finding container dfa4e6abfbea18ca2d71a2af128306cd11e971254d2b98ff7b22689a6f48812e: Status 404 returned error can't find the container with id dfa4e6abfbea18ca2d71a2af128306cd11e971254d2b98ff7b22689a6f48812e Oct 03 08:59:37 crc kubenswrapper[4899]: I1003 08:59:37.415613 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"76c142d4-6700-4120-bd50-aaf4e1b8d5b8","Type":"ContainerStarted","Data":"32f935e96ee2e7af2170927ed1e2f40bc727f5df02228c5fc9ee2a71cf166df7"} Oct 03 08:59:37 crc kubenswrapper[4899]: I1003 08:59:37.426459 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"6969051c-bc07-454c-b958-b9e203f95ee5","Type":"ContainerStarted","Data":"539a31bfacf358a265191943490b9ea845151519615d6984613d6e8b5efe4526"} Oct 03 08:59:37 crc kubenswrapper[4899]: I1003 08:59:37.426498 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"6969051c-bc07-454c-b958-b9e203f95ee5","Type":"ContainerStarted","Data":"dfa4e6abfbea18ca2d71a2af128306cd11e971254d2b98ff7b22689a6f48812e"} Oct 03 08:59:37 crc kubenswrapper[4899]: I1003 08:59:37.436453 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.436432949 podStartE2EDuration="2.436432949s" podCreationTimestamp="2025-10-03 08:59:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:59:37.431405141 +0000 UTC m=+1151.538890114" watchObservedRunningTime="2025-10-03 08:59:37.436432949 +0000 UTC m=+1151.543917902" Oct 03 08:59:37 crc kubenswrapper[4899]: I1003 08:59:37.448761 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.448723448 podStartE2EDuration="1.448723448s" podCreationTimestamp="2025-10-03 08:59:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:59:37.44655551 +0000 UTC m=+1151.554040463" watchObservedRunningTime="2025-10-03 08:59:37.448723448 +0000 UTC m=+1151.556208401" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.139690 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.268378 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ww4ww\" (UniqueName: \"kubernetes.io/projected/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-kube-api-access-ww4ww\") pod \"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6\" (UID: \"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6\") " Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.268459 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-config-data\") pod \"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6\" (UID: \"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6\") " Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.268500 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-combined-ca-bundle\") pod \"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6\" (UID: \"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6\") " Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.268533 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-logs\") pod \"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6\" (UID: \"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6\") " Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.268582 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-nova-metadata-tls-certs\") pod \"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6\" (UID: \"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6\") " Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.270495 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-logs" (OuterVolumeSpecName: "logs") pod "6dc10dad-9e14-4f05-9519-d7c38d5a4ca6" (UID: "6dc10dad-9e14-4f05-9519-d7c38d5a4ca6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.284546 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-kube-api-access-ww4ww" (OuterVolumeSpecName: "kube-api-access-ww4ww") pod "6dc10dad-9e14-4f05-9519-d7c38d5a4ca6" (UID: "6dc10dad-9e14-4f05-9519-d7c38d5a4ca6"). InnerVolumeSpecName "kube-api-access-ww4ww". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.297428 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-config-data" (OuterVolumeSpecName: "config-data") pod "6dc10dad-9e14-4f05-9519-d7c38d5a4ca6" (UID: "6dc10dad-9e14-4f05-9519-d7c38d5a4ca6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.297847 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6dc10dad-9e14-4f05-9519-d7c38d5a4ca6" (UID: "6dc10dad-9e14-4f05-9519-d7c38d5a4ca6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.345165 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "6dc10dad-9e14-4f05-9519-d7c38d5a4ca6" (UID: "6dc10dad-9e14-4f05-9519-d7c38d5a4ca6"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.370291 4899 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.370327 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ww4ww\" (UniqueName: \"kubernetes.io/projected/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-kube-api-access-ww4ww\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.370337 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.370347 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.370357 4899 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6-logs\") on node \"crc\" DevicePath \"\"" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.449478 4899 generic.go:334] "Generic (PLEG): container finished" podID="6dc10dad-9e14-4f05-9519-d7c38d5a4ca6" containerID="0e203adc17a136b60fad2665a004dd0cb3c96d39e92b3d66228f7e4b1ce810e9" exitCode=0 Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.450793 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.450800 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6","Type":"ContainerDied","Data":"0e203adc17a136b60fad2665a004dd0cb3c96d39e92b3d66228f7e4b1ce810e9"} Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.450923 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6dc10dad-9e14-4f05-9519-d7c38d5a4ca6","Type":"ContainerDied","Data":"f6f21c786350117bda6e83985f1e381c1a22fc34e13d8770e2492215c80945a7"} Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.450959 4899 scope.go:117] "RemoveContainer" containerID="0e203adc17a136b60fad2665a004dd0cb3c96d39e92b3d66228f7e4b1ce810e9" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.475526 4899 scope.go:117] "RemoveContainer" containerID="260295fae2fc28336557bc3d1e63ffc8c8ef713ea00b862a8b4e2d4d57d351f1" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.491560 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.510118 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.521145 4899 scope.go:117] "RemoveContainer" containerID="0e203adc17a136b60fad2665a004dd0cb3c96d39e92b3d66228f7e4b1ce810e9" Oct 03 08:59:38 crc kubenswrapper[4899]: E1003 08:59:38.524019 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e203adc17a136b60fad2665a004dd0cb3c96d39e92b3d66228f7e4b1ce810e9\": container with ID starting with 0e203adc17a136b60fad2665a004dd0cb3c96d39e92b3d66228f7e4b1ce810e9 not found: ID does not exist" containerID="0e203adc17a136b60fad2665a004dd0cb3c96d39e92b3d66228f7e4b1ce810e9" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.524161 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e203adc17a136b60fad2665a004dd0cb3c96d39e92b3d66228f7e4b1ce810e9"} err="failed to get container status \"0e203adc17a136b60fad2665a004dd0cb3c96d39e92b3d66228f7e4b1ce810e9\": rpc error: code = NotFound desc = could not find container \"0e203adc17a136b60fad2665a004dd0cb3c96d39e92b3d66228f7e4b1ce810e9\": container with ID starting with 0e203adc17a136b60fad2665a004dd0cb3c96d39e92b3d66228f7e4b1ce810e9 not found: ID does not exist" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.524250 4899 scope.go:117] "RemoveContainer" containerID="260295fae2fc28336557bc3d1e63ffc8c8ef713ea00b862a8b4e2d4d57d351f1" Oct 03 08:59:38 crc kubenswrapper[4899]: E1003 08:59:38.524713 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"260295fae2fc28336557bc3d1e63ffc8c8ef713ea00b862a8b4e2d4d57d351f1\": container with ID starting with 260295fae2fc28336557bc3d1e63ffc8c8ef713ea00b862a8b4e2d4d57d351f1 not found: ID does not exist" containerID="260295fae2fc28336557bc3d1e63ffc8c8ef713ea00b862a8b4e2d4d57d351f1" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.524749 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"260295fae2fc28336557bc3d1e63ffc8c8ef713ea00b862a8b4e2d4d57d351f1"} err="failed to get container status \"260295fae2fc28336557bc3d1e63ffc8c8ef713ea00b862a8b4e2d4d57d351f1\": rpc error: code = 
NotFound desc = could not find container \"260295fae2fc28336557bc3d1e63ffc8c8ef713ea00b862a8b4e2d4d57d351f1\": container with ID starting with 260295fae2fc28336557bc3d1e63ffc8c8ef713ea00b862a8b4e2d4d57d351f1 not found: ID does not exist" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.525350 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 03 08:59:38 crc kubenswrapper[4899]: E1003 08:59:38.525809 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dc10dad-9e14-4f05-9519-d7c38d5a4ca6" containerName="nova-metadata-log" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.525834 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dc10dad-9e14-4f05-9519-d7c38d5a4ca6" containerName="nova-metadata-log" Oct 03 08:59:38 crc kubenswrapper[4899]: E1003 08:59:38.525852 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dc10dad-9e14-4f05-9519-d7c38d5a4ca6" containerName="nova-metadata-metadata" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.525861 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dc10dad-9e14-4f05-9519-d7c38d5a4ca6" containerName="nova-metadata-metadata" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.526123 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="6dc10dad-9e14-4f05-9519-d7c38d5a4ca6" containerName="nova-metadata-log" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.526144 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="6dc10dad-9e14-4f05-9519-d7c38d5a4ca6" containerName="nova-metadata-metadata" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.528535 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.530877 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.531080 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.540613 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6dc10dad-9e14-4f05-9519-d7c38d5a4ca6" path="/var/lib/kubelet/pods/6dc10dad-9e14-4f05-9519-d7c38d5a4ca6/volumes" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.544469 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.675730 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c14a82a-78c7-4366-a2bc-91e1f880d841-logs\") pod \"nova-metadata-0\" (UID: \"4c14a82a-78c7-4366-a2bc-91e1f880d841\") " pod="openstack/nova-metadata-0" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.675814 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c14a82a-78c7-4366-a2bc-91e1f880d841-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4c14a82a-78c7-4366-a2bc-91e1f880d841\") " pod="openstack/nova-metadata-0" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.676615 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/4c14a82a-78c7-4366-a2bc-91e1f880d841-config-data\") pod \"nova-metadata-0\" (UID: \"4c14a82a-78c7-4366-a2bc-91e1f880d841\") " pod="openstack/nova-metadata-0" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.677049 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tth2w\" (UniqueName: \"kubernetes.io/projected/4c14a82a-78c7-4366-a2bc-91e1f880d841-kube-api-access-tth2w\") pod \"nova-metadata-0\" (UID: \"4c14a82a-78c7-4366-a2bc-91e1f880d841\") " pod="openstack/nova-metadata-0" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.677150 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c14a82a-78c7-4366-a2bc-91e1f880d841-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4c14a82a-78c7-4366-a2bc-91e1f880d841\") " pod="openstack/nova-metadata-0" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.779392 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c14a82a-78c7-4366-a2bc-91e1f880d841-config-data\") pod \"nova-metadata-0\" (UID: \"4c14a82a-78c7-4366-a2bc-91e1f880d841\") " pod="openstack/nova-metadata-0" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.779466 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tth2w\" (UniqueName: \"kubernetes.io/projected/4c14a82a-78c7-4366-a2bc-91e1f880d841-kube-api-access-tth2w\") pod \"nova-metadata-0\" (UID: \"4c14a82a-78c7-4366-a2bc-91e1f880d841\") " pod="openstack/nova-metadata-0" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.779512 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c14a82a-78c7-4366-a2bc-91e1f880d841-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4c14a82a-78c7-4366-a2bc-91e1f880d841\") " pod="openstack/nova-metadata-0" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.779567 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c14a82a-78c7-4366-a2bc-91e1f880d841-logs\") pod \"nova-metadata-0\" (UID: \"4c14a82a-78c7-4366-a2bc-91e1f880d841\") " pod="openstack/nova-metadata-0" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.779610 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c14a82a-78c7-4366-a2bc-91e1f880d841-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4c14a82a-78c7-4366-a2bc-91e1f880d841\") " pod="openstack/nova-metadata-0" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.780051 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c14a82a-78c7-4366-a2bc-91e1f880d841-logs\") pod \"nova-metadata-0\" (UID: \"4c14a82a-78c7-4366-a2bc-91e1f880d841\") " pod="openstack/nova-metadata-0" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.784481 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c14a82a-78c7-4366-a2bc-91e1f880d841-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4c14a82a-78c7-4366-a2bc-91e1f880d841\") " pod="openstack/nova-metadata-0" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.785333 
4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c14a82a-78c7-4366-a2bc-91e1f880d841-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4c14a82a-78c7-4366-a2bc-91e1f880d841\") " pod="openstack/nova-metadata-0" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.785923 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c14a82a-78c7-4366-a2bc-91e1f880d841-config-data\") pod \"nova-metadata-0\" (UID: \"4c14a82a-78c7-4366-a2bc-91e1f880d841\") " pod="openstack/nova-metadata-0" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.797579 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tth2w\" (UniqueName: \"kubernetes.io/projected/4c14a82a-78c7-4366-a2bc-91e1f880d841-kube-api-access-tth2w\") pod \"nova-metadata-0\" (UID: \"4c14a82a-78c7-4366-a2bc-91e1f880d841\") " pod="openstack/nova-metadata-0" Oct 03 08:59:38 crc kubenswrapper[4899]: I1003 08:59:38.853027 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 08:59:39 crc kubenswrapper[4899]: I1003 08:59:39.293102 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 08:59:39 crc kubenswrapper[4899]: W1003 08:59:39.298632 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4c14a82a_78c7_4366_a2bc_91e1f880d841.slice/crio-fc3573f5106542461a1cfbf1b6c5e9d951eab911719b2dd33f50db4fc0cac250 WatchSource:0}: Error finding container fc3573f5106542461a1cfbf1b6c5e9d951eab911719b2dd33f50db4fc0cac250: Status 404 returned error can't find the container with id fc3573f5106542461a1cfbf1b6c5e9d951eab911719b2dd33f50db4fc0cac250 Oct 03 08:59:39 crc kubenswrapper[4899]: I1003 08:59:39.461672 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4c14a82a-78c7-4366-a2bc-91e1f880d841","Type":"ContainerStarted","Data":"fc3573f5106542461a1cfbf1b6c5e9d951eab911719b2dd33f50db4fc0cac250"} Oct 03 08:59:40 crc kubenswrapper[4899]: I1003 08:59:40.473884 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4c14a82a-78c7-4366-a2bc-91e1f880d841","Type":"ContainerStarted","Data":"25cb0c55508e89e04cddba9466cc555482694693981bce18e5332f6b048c4556"} Oct 03 08:59:40 crc kubenswrapper[4899]: I1003 08:59:40.474825 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4c14a82a-78c7-4366-a2bc-91e1f880d841","Type":"ContainerStarted","Data":"3fa38c2359c16c5323c00ed2c0473faf425b4fcea3b73a1e3a7eeefac84ca426"} Oct 03 08:59:40 crc kubenswrapper[4899]: I1003 08:59:40.496006 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.495985168 podStartE2EDuration="2.495985168s" podCreationTimestamp="2025-10-03 08:59:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 08:59:40.492420206 +0000 UTC m=+1154.599905159" watchObservedRunningTime="2025-10-03 08:59:40.495985168 +0000 UTC m=+1154.603470131" Oct 03 08:59:41 crc kubenswrapper[4899]: I1003 08:59:41.810760 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 03 08:59:42 crc kubenswrapper[4899]: I1003 
08:59:42.197826 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 08:59:42 crc kubenswrapper[4899]: I1003 08:59:42.197929 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 08:59:42 crc kubenswrapper[4899]: I1003 08:59:42.197990 4899 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 08:59:42 crc kubenswrapper[4899]: I1003 08:59:42.198719 4899 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"890db85d4fbdd365302c6e5ed34afe49d3195769246d9b9d458ffa94cd16c5f8"} pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 08:59:42 crc kubenswrapper[4899]: I1003 08:59:42.198779 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" containerID="cri-o://890db85d4fbdd365302c6e5ed34afe49d3195769246d9b9d458ffa94cd16c5f8" gracePeriod=600 Oct 03 08:59:42 crc kubenswrapper[4899]: I1003 08:59:42.499229 4899 generic.go:334] "Generic (PLEG): container finished" podID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerID="890db85d4fbdd365302c6e5ed34afe49d3195769246d9b9d458ffa94cd16c5f8" exitCode=0 Oct 03 08:59:42 crc kubenswrapper[4899]: I1003 08:59:42.499460 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerDied","Data":"890db85d4fbdd365302c6e5ed34afe49d3195769246d9b9d458ffa94cd16c5f8"} Oct 03 08:59:42 crc kubenswrapper[4899]: I1003 08:59:42.499857 4899 scope.go:117] "RemoveContainer" containerID="56a8af8272c5f0a0da6c3696ab9575cf28a2d27b7f650888f1dd37ac24669078" Oct 03 08:59:43 crc kubenswrapper[4899]: I1003 08:59:43.509810 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerStarted","Data":"def1a55ad00f7229a571d38a4260404a613179a181d031abe241127ae4349c21"} Oct 03 08:59:43 crc kubenswrapper[4899]: I1003 08:59:43.853481 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 03 08:59:43 crc kubenswrapper[4899]: I1003 08:59:43.853527 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 03 08:59:45 crc kubenswrapper[4899]: I1003 08:59:45.802119 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 08:59:45 crc kubenswrapper[4899]: I1003 08:59:45.802631 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 08:59:46 crc kubenswrapper[4899]: I1003 
08:59:46.810143 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 03 08:59:46 crc kubenswrapper[4899]: I1003 08:59:46.826201 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="76c142d4-6700-4120-bd50-aaf4e1b8d5b8" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.202:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 08:59:46 crc kubenswrapper[4899]: I1003 08:59:46.826214 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="76c142d4-6700-4120-bd50-aaf4e1b8d5b8" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.202:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 08:59:46 crc kubenswrapper[4899]: I1003 08:59:46.845661 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 03 08:59:47 crc kubenswrapper[4899]: I1003 08:59:47.571217 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 03 08:59:48 crc kubenswrapper[4899]: I1003 08:59:48.854091 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 03 08:59:48 crc kubenswrapper[4899]: I1003 08:59:48.854164 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 03 08:59:49 crc kubenswrapper[4899]: I1003 08:59:49.866242 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="4c14a82a-78c7-4366-a2bc-91e1f880d841" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.204:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 08:59:49 crc kubenswrapper[4899]: I1003 08:59:49.866244 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="4c14a82a-78c7-4366-a2bc-91e1f880d841" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.204:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 08:59:52 crc kubenswrapper[4899]: I1003 08:59:52.858849 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 03 08:59:55 crc kubenswrapper[4899]: I1003 08:59:55.809055 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 03 08:59:55 crc kubenswrapper[4899]: I1003 08:59:55.809386 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 03 08:59:55 crc kubenswrapper[4899]: I1003 08:59:55.809616 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 03 08:59:55 crc kubenswrapper[4899]: I1003 08:59:55.809654 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 03 08:59:55 crc kubenswrapper[4899]: I1003 08:59:55.816711 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 03 08:59:55 crc kubenswrapper[4899]: I1003 08:59:55.818106 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 03 08:59:58 crc kubenswrapper[4899]: I1003 08:59:58.861959 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openstack/nova-metadata-0" Oct 03 08:59:58 crc kubenswrapper[4899]: I1003 08:59:58.866007 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 03 08:59:58 crc kubenswrapper[4899]: I1003 08:59:58.870465 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 03 08:59:59 crc kubenswrapper[4899]: I1003 08:59:59.651534 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 03 09:00:00 crc kubenswrapper[4899]: I1003 09:00:00.157658 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324700-r2dw4"] Oct 03 09:00:00 crc kubenswrapper[4899]: I1003 09:00:00.159297 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324700-r2dw4" Oct 03 09:00:00 crc kubenswrapper[4899]: I1003 09:00:00.167505 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324700-r2dw4"] Oct 03 09:00:00 crc kubenswrapper[4899]: I1003 09:00:00.199230 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 03 09:00:00 crc kubenswrapper[4899]: I1003 09:00:00.199614 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 03 09:00:00 crc kubenswrapper[4899]: I1003 09:00:00.271738 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/559c1b4c-3181-42b6-9c75-d6fafcb769ab-config-volume\") pod \"collect-profiles-29324700-r2dw4\" (UID: \"559c1b4c-3181-42b6-9c75-d6fafcb769ab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324700-r2dw4" Oct 03 09:00:00 crc kubenswrapper[4899]: I1003 09:00:00.271808 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxg8s\" (UniqueName: \"kubernetes.io/projected/559c1b4c-3181-42b6-9c75-d6fafcb769ab-kube-api-access-mxg8s\") pod \"collect-profiles-29324700-r2dw4\" (UID: \"559c1b4c-3181-42b6-9c75-d6fafcb769ab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324700-r2dw4" Oct 03 09:00:00 crc kubenswrapper[4899]: I1003 09:00:00.272034 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/559c1b4c-3181-42b6-9c75-d6fafcb769ab-secret-volume\") pod \"collect-profiles-29324700-r2dw4\" (UID: \"559c1b4c-3181-42b6-9c75-d6fafcb769ab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324700-r2dw4" Oct 03 09:00:00 crc kubenswrapper[4899]: I1003 09:00:00.373527 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/559c1b4c-3181-42b6-9c75-d6fafcb769ab-secret-volume\") pod \"collect-profiles-29324700-r2dw4\" (UID: \"559c1b4c-3181-42b6-9c75-d6fafcb769ab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324700-r2dw4" Oct 03 09:00:00 crc kubenswrapper[4899]: I1003 09:00:00.373658 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/559c1b4c-3181-42b6-9c75-d6fafcb769ab-config-volume\") pod \"collect-profiles-29324700-r2dw4\" (UID: \"559c1b4c-3181-42b6-9c75-d6fafcb769ab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324700-r2dw4" Oct 03 09:00:00 crc kubenswrapper[4899]: I1003 09:00:00.373688 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxg8s\" (UniqueName: \"kubernetes.io/projected/559c1b4c-3181-42b6-9c75-d6fafcb769ab-kube-api-access-mxg8s\") pod \"collect-profiles-29324700-r2dw4\" (UID: \"559c1b4c-3181-42b6-9c75-d6fafcb769ab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324700-r2dw4" Oct 03 09:00:00 crc kubenswrapper[4899]: I1003 09:00:00.374485 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/559c1b4c-3181-42b6-9c75-d6fafcb769ab-config-volume\") pod \"collect-profiles-29324700-r2dw4\" (UID: \"559c1b4c-3181-42b6-9c75-d6fafcb769ab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324700-r2dw4" Oct 03 09:00:00 crc kubenswrapper[4899]: I1003 09:00:00.382802 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/559c1b4c-3181-42b6-9c75-d6fafcb769ab-secret-volume\") pod \"collect-profiles-29324700-r2dw4\" (UID: \"559c1b4c-3181-42b6-9c75-d6fafcb769ab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324700-r2dw4" Oct 03 09:00:00 crc kubenswrapper[4899]: I1003 09:00:00.397689 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxg8s\" (UniqueName: \"kubernetes.io/projected/559c1b4c-3181-42b6-9c75-d6fafcb769ab-kube-api-access-mxg8s\") pod \"collect-profiles-29324700-r2dw4\" (UID: \"559c1b4c-3181-42b6-9c75-d6fafcb769ab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324700-r2dw4" Oct 03 09:00:00 crc kubenswrapper[4899]: I1003 09:00:00.523829 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324700-r2dw4" Oct 03 09:00:00 crc kubenswrapper[4899]: I1003 09:00:00.981465 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324700-r2dw4"] Oct 03 09:00:01 crc kubenswrapper[4899]: I1003 09:00:01.668991 4899 generic.go:334] "Generic (PLEG): container finished" podID="559c1b4c-3181-42b6-9c75-d6fafcb769ab" containerID="d41168deaddca0fd748dfeb7ade6d473ace59560bd489f04a33fe6d000006bfb" exitCode=0 Oct 03 09:00:01 crc kubenswrapper[4899]: I1003 09:00:01.669095 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324700-r2dw4" event={"ID":"559c1b4c-3181-42b6-9c75-d6fafcb769ab","Type":"ContainerDied","Data":"d41168deaddca0fd748dfeb7ade6d473ace59560bd489f04a33fe6d000006bfb"} Oct 03 09:00:01 crc kubenswrapper[4899]: I1003 09:00:01.669314 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324700-r2dw4" event={"ID":"559c1b4c-3181-42b6-9c75-d6fafcb769ab","Type":"ContainerStarted","Data":"f7c0eb5bf12b92c1990c7da56155de452662f9b72275c18c314a90b7020d6f88"} Oct 03 09:00:03 crc kubenswrapper[4899]: I1003 09:00:03.025494 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324700-r2dw4" Oct 03 09:00:03 crc kubenswrapper[4899]: I1003 09:00:03.125789 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mxg8s\" (UniqueName: \"kubernetes.io/projected/559c1b4c-3181-42b6-9c75-d6fafcb769ab-kube-api-access-mxg8s\") pod \"559c1b4c-3181-42b6-9c75-d6fafcb769ab\" (UID: \"559c1b4c-3181-42b6-9c75-d6fafcb769ab\") " Oct 03 09:00:03 crc kubenswrapper[4899]: I1003 09:00:03.125939 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/559c1b4c-3181-42b6-9c75-d6fafcb769ab-secret-volume\") pod \"559c1b4c-3181-42b6-9c75-d6fafcb769ab\" (UID: \"559c1b4c-3181-42b6-9c75-d6fafcb769ab\") " Oct 03 09:00:03 crc kubenswrapper[4899]: I1003 09:00:03.126078 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/559c1b4c-3181-42b6-9c75-d6fafcb769ab-config-volume\") pod \"559c1b4c-3181-42b6-9c75-d6fafcb769ab\" (UID: \"559c1b4c-3181-42b6-9c75-d6fafcb769ab\") " Oct 03 09:00:03 crc kubenswrapper[4899]: I1003 09:00:03.127016 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/559c1b4c-3181-42b6-9c75-d6fafcb769ab-config-volume" (OuterVolumeSpecName: "config-volume") pod "559c1b4c-3181-42b6-9c75-d6fafcb769ab" (UID: "559c1b4c-3181-42b6-9c75-d6fafcb769ab"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 09:00:03 crc kubenswrapper[4899]: I1003 09:00:03.130971 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/559c1b4c-3181-42b6-9c75-d6fafcb769ab-kube-api-access-mxg8s" (OuterVolumeSpecName: "kube-api-access-mxg8s") pod "559c1b4c-3181-42b6-9c75-d6fafcb769ab" (UID: "559c1b4c-3181-42b6-9c75-d6fafcb769ab"). InnerVolumeSpecName "kube-api-access-mxg8s". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:00:03 crc kubenswrapper[4899]: I1003 09:00:03.131153 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/559c1b4c-3181-42b6-9c75-d6fafcb769ab-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "559c1b4c-3181-42b6-9c75-d6fafcb769ab" (UID: "559c1b4c-3181-42b6-9c75-d6fafcb769ab"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:00:03 crc kubenswrapper[4899]: I1003 09:00:03.227661 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mxg8s\" (UniqueName: \"kubernetes.io/projected/559c1b4c-3181-42b6-9c75-d6fafcb769ab-kube-api-access-mxg8s\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:03 crc kubenswrapper[4899]: I1003 09:00:03.228025 4899 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/559c1b4c-3181-42b6-9c75-d6fafcb769ab-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:03 crc kubenswrapper[4899]: I1003 09:00:03.228122 4899 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/559c1b4c-3181-42b6-9c75-d6fafcb769ab-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:03 crc kubenswrapper[4899]: I1003 09:00:03.685003 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324700-r2dw4" event={"ID":"559c1b4c-3181-42b6-9c75-d6fafcb769ab","Type":"ContainerDied","Data":"f7c0eb5bf12b92c1990c7da56155de452662f9b72275c18c314a90b7020d6f88"} Oct 03 09:00:03 crc kubenswrapper[4899]: I1003 09:00:03.685338 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f7c0eb5bf12b92c1990c7da56155de452662f9b72275c18c314a90b7020d6f88" Oct 03 09:00:03 crc kubenswrapper[4899]: I1003 09:00:03.685084 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324700-r2dw4" Oct 03 09:00:08 crc kubenswrapper[4899]: I1003 09:00:08.772369 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 09:00:09 crc kubenswrapper[4899]: I1003 09:00:09.551387 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 09:00:13 crc kubenswrapper[4899]: I1003 09:00:13.008508 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="2d0a71f9-b4af-49f7-b2fe-267a78a4c086" containerName="rabbitmq" containerID="cri-o://938fc7c3cbc765fb2a1b2e95e7e56d472486dba80c36a00504469c810523131e" gracePeriod=604796 Oct 03 09:00:13 crc kubenswrapper[4899]: I1003 09:00:13.923985 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="cff2733b-858c-4578-abcb-a0c503b556d3" containerName="rabbitmq" containerID="cri-o://db44beff9f0057405792026074513811d0b482b82b223a76e2fae8a18ac6215d" gracePeriod=604796 Oct 03 09:00:14 crc kubenswrapper[4899]: I1003 09:00:14.804146 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="2d0a71f9-b4af-49f7-b2fe-267a78a4c086" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.100:5671: connect: connection refused" Oct 03 09:00:15 crc kubenswrapper[4899]: I1003 09:00:15.090820 4899 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="cff2733b-858c-4578-abcb-a0c503b556d3" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.101:5671: connect: connection refused" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.456760 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.533108 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.533315 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-rabbitmq-plugins\") pod \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.533405 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-config-data\") pod \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.533491 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-server-conf\") pod \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.533565 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-pod-info\") pod \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.533592 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-449ld\" (UniqueName: \"kubernetes.io/projected/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-kube-api-access-449ld\") pod \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.533770 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-plugins-conf\") pod \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.533931 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-rabbitmq-confd\") pod \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.534074 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-erlang-cookie-secret\") pod \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.534155 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-rabbitmq-tls\") pod \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\" (UID: 
\"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.534228 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-rabbitmq-erlang-cookie\") pod \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\" (UID: \"2d0a71f9-b4af-49f7-b2fe-267a78a4c086\") " Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.537944 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "2d0a71f9-b4af-49f7-b2fe-267a78a4c086" (UID: "2d0a71f9-b4af-49f7-b2fe-267a78a4c086"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.538852 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "2d0a71f9-b4af-49f7-b2fe-267a78a4c086" (UID: "2d0a71f9-b4af-49f7-b2fe-267a78a4c086"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.541847 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "persistence") pod "2d0a71f9-b4af-49f7-b2fe-267a78a4c086" (UID: "2d0a71f9-b4af-49f7-b2fe-267a78a4c086"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.543662 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-kube-api-access-449ld" (OuterVolumeSpecName: "kube-api-access-449ld") pod "2d0a71f9-b4af-49f7-b2fe-267a78a4c086" (UID: "2d0a71f9-b4af-49f7-b2fe-267a78a4c086"). InnerVolumeSpecName "kube-api-access-449ld". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.546239 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "2d0a71f9-b4af-49f7-b2fe-267a78a4c086" (UID: "2d0a71f9-b4af-49f7-b2fe-267a78a4c086"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.546831 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "2d0a71f9-b4af-49f7-b2fe-267a78a4c086" (UID: "2d0a71f9-b4af-49f7-b2fe-267a78a4c086"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.548737 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "2d0a71f9-b4af-49f7-b2fe-267a78a4c086" (UID: "2d0a71f9-b4af-49f7-b2fe-267a78a4c086"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.565389 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-pod-info" (OuterVolumeSpecName: "pod-info") pod "2d0a71f9-b4af-49f7-b2fe-267a78a4c086" (UID: "2d0a71f9-b4af-49f7-b2fe-267a78a4c086"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.586799 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-config-data" (OuterVolumeSpecName: "config-data") pod "2d0a71f9-b4af-49f7-b2fe-267a78a4c086" (UID: "2d0a71f9-b4af-49f7-b2fe-267a78a4c086"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.638855 4899 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.638911 4899 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.638924 4899 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.638932 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.638940 4899 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-pod-info\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.638949 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-449ld\" (UniqueName: \"kubernetes.io/projected/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-kube-api-access-449ld\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.638957 4899 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.638965 4899 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.638975 4899 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.646381 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-server-conf" 
(OuterVolumeSpecName: "server-conf") pod "2d0a71f9-b4af-49f7-b2fe-267a78a4c086" (UID: "2d0a71f9-b4af-49f7-b2fe-267a78a4c086"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.666742 4899 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.693191 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "2d0a71f9-b4af-49f7-b2fe-267a78a4c086" (UID: "2d0a71f9-b4af-49f7-b2fe-267a78a4c086"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.741346 4899 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.741389 4899 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-server-conf\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.741404 4899 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2d0a71f9-b4af-49f7-b2fe-267a78a4c086-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.817800 4899 generic.go:334] "Generic (PLEG): container finished" podID="2d0a71f9-b4af-49f7-b2fe-267a78a4c086" containerID="938fc7c3cbc765fb2a1b2e95e7e56d472486dba80c36a00504469c810523131e" exitCode=0 Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.817855 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"2d0a71f9-b4af-49f7-b2fe-267a78a4c086","Type":"ContainerDied","Data":"938fc7c3cbc765fb2a1b2e95e7e56d472486dba80c36a00504469c810523131e"} Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.817914 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"2d0a71f9-b4af-49f7-b2fe-267a78a4c086","Type":"ContainerDied","Data":"f984cd2bb0c3cb28c24c9c2b00efebfa432b4d1fd873bd0163cd5a7cf016ad32"} Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.817938 4899 scope.go:117] "RemoveContainer" containerID="938fc7c3cbc765fb2a1b2e95e7e56d472486dba80c36a00504469c810523131e" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.818184 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.846416 4899 scope.go:117] "RemoveContainer" containerID="473248a8c249a4ad67707f5370440c74ec966684cf8db9efdffc15efa23aea2f" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.859647 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.869718 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.884127 4899 scope.go:117] "RemoveContainer" containerID="938fc7c3cbc765fb2a1b2e95e7e56d472486dba80c36a00504469c810523131e" Oct 03 09:00:19 crc kubenswrapper[4899]: E1003 09:00:19.884605 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"938fc7c3cbc765fb2a1b2e95e7e56d472486dba80c36a00504469c810523131e\": container with ID starting with 938fc7c3cbc765fb2a1b2e95e7e56d472486dba80c36a00504469c810523131e not found: ID does not exist" containerID="938fc7c3cbc765fb2a1b2e95e7e56d472486dba80c36a00504469c810523131e" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.884705 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.884704 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"938fc7c3cbc765fb2a1b2e95e7e56d472486dba80c36a00504469c810523131e"} err="failed to get container status \"938fc7c3cbc765fb2a1b2e95e7e56d472486dba80c36a00504469c810523131e\": rpc error: code = NotFound desc = could not find container \"938fc7c3cbc765fb2a1b2e95e7e56d472486dba80c36a00504469c810523131e\": container with ID starting with 938fc7c3cbc765fb2a1b2e95e7e56d472486dba80c36a00504469c810523131e not found: ID does not exist" Oct 03 09:00:19 crc kubenswrapper[4899]: E1003 09:00:19.885477 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d0a71f9-b4af-49f7-b2fe-267a78a4c086" containerName="rabbitmq" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.885508 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d0a71f9-b4af-49f7-b2fe-267a78a4c086" containerName="rabbitmq" Oct 03 09:00:19 crc kubenswrapper[4899]: E1003 09:00:19.885530 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="559c1b4c-3181-42b6-9c75-d6fafcb769ab" containerName="collect-profiles" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.885538 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="559c1b4c-3181-42b6-9c75-d6fafcb769ab" containerName="collect-profiles" Oct 03 09:00:19 crc kubenswrapper[4899]: E1003 09:00:19.885575 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d0a71f9-b4af-49f7-b2fe-267a78a4c086" containerName="setup-container" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.885584 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d0a71f9-b4af-49f7-b2fe-267a78a4c086" containerName="setup-container" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.885594 4899 scope.go:117] "RemoveContainer" containerID="473248a8c249a4ad67707f5370440c74ec966684cf8db9efdffc15efa23aea2f" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.886234 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="559c1b4c-3181-42b6-9c75-d6fafcb769ab" containerName="collect-profiles" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 
09:00:19.886266 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d0a71f9-b4af-49f7-b2fe-267a78a4c086" containerName="rabbitmq" Oct 03 09:00:19 crc kubenswrapper[4899]: E1003 09:00:19.887013 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"473248a8c249a4ad67707f5370440c74ec966684cf8db9efdffc15efa23aea2f\": container with ID starting with 473248a8c249a4ad67707f5370440c74ec966684cf8db9efdffc15efa23aea2f not found: ID does not exist" containerID="473248a8c249a4ad67707f5370440c74ec966684cf8db9efdffc15efa23aea2f" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.887075 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"473248a8c249a4ad67707f5370440c74ec966684cf8db9efdffc15efa23aea2f"} err="failed to get container status \"473248a8c249a4ad67707f5370440c74ec966684cf8db9efdffc15efa23aea2f\": rpc error: code = NotFound desc = could not find container \"473248a8c249a4ad67707f5370440c74ec966684cf8db9efdffc15efa23aea2f\": container with ID starting with 473248a8c249a4ad67707f5370440c74ec966684cf8db9efdffc15efa23aea2f not found: ID does not exist" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.887529 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.894336 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.894376 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.894447 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.894715 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.894861 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.895052 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-6lr97" Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.908775 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 09:00:19 crc kubenswrapper[4899]: I1003 09:00:19.909091 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.050199 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jr6c4\" (UniqueName: \"kubernetes.io/projected/7415d874-aa51-4fc4-8b40-b487392c248c-kube-api-access-jr6c4\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.050277 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7415d874-aa51-4fc4-8b40-b487392c248c-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 
09:00:20.050364 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7415d874-aa51-4fc4-8b40-b487392c248c-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.050407 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7415d874-aa51-4fc4-8b40-b487392c248c-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.050457 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7415d874-aa51-4fc4-8b40-b487392c248c-pod-info\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.050541 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7415d874-aa51-4fc4-8b40-b487392c248c-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.050582 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7415d874-aa51-4fc4-8b40-b487392c248c-config-data\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.050626 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.050666 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7415d874-aa51-4fc4-8b40-b487392c248c-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.050699 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7415d874-aa51-4fc4-8b40-b487392c248c-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.050723 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7415d874-aa51-4fc4-8b40-b487392c248c-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.153292 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" 
(UniqueName: \"kubernetes.io/configmap/7415d874-aa51-4fc4-8b40-b487392c248c-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.153368 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jr6c4\" (UniqueName: \"kubernetes.io/projected/7415d874-aa51-4fc4-8b40-b487392c248c-kube-api-access-jr6c4\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.153411 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7415d874-aa51-4fc4-8b40-b487392c248c-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.153467 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7415d874-aa51-4fc4-8b40-b487392c248c-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.153510 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7415d874-aa51-4fc4-8b40-b487392c248c-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.153577 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7415d874-aa51-4fc4-8b40-b487392c248c-pod-info\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.153673 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7415d874-aa51-4fc4-8b40-b487392c248c-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.153722 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7415d874-aa51-4fc4-8b40-b487392c248c-config-data\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.153780 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.153834 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7415d874-aa51-4fc4-8b40-b487392c248c-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 
09:00:20.154016 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7415d874-aa51-4fc4-8b40-b487392c248c-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.154612 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7415d874-aa51-4fc4-8b40-b487392c248c-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.156025 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7415d874-aa51-4fc4-8b40-b487392c248c-config-data\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.156318 4899 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.156386 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7415d874-aa51-4fc4-8b40-b487392c248c-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.156478 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7415d874-aa51-4fc4-8b40-b487392c248c-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.159319 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7415d874-aa51-4fc4-8b40-b487392c248c-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.166545 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7415d874-aa51-4fc4-8b40-b487392c248c-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.167430 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7415d874-aa51-4fc4-8b40-b487392c248c-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.171340 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7415d874-aa51-4fc4-8b40-b487392c248c-pod-info\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " 
pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.172724 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7415d874-aa51-4fc4-8b40-b487392c248c-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.175067 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jr6c4\" (UniqueName: \"kubernetes.io/projected/7415d874-aa51-4fc4-8b40-b487392c248c-kube-api-access-jr6c4\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.197936 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"7415d874-aa51-4fc4-8b40-b487392c248c\") " pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.217712 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.545060 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d0a71f9-b4af-49f7-b2fe-267a78a4c086" path="/var/lib/kubelet/pods/2d0a71f9-b4af-49f7-b2fe-267a78a4c086/volumes" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.557663 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.664076 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cff2733b-858c-4578-abcb-a0c503b556d3-rabbitmq-plugins\") pod \"cff2733b-858c-4578-abcb-a0c503b556d3\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.664170 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cff2733b-858c-4578-abcb-a0c503b556d3-config-data\") pod \"cff2733b-858c-4578-abcb-a0c503b556d3\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.664217 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jx7bs\" (UniqueName: \"kubernetes.io/projected/cff2733b-858c-4578-abcb-a0c503b556d3-kube-api-access-jx7bs\") pod \"cff2733b-858c-4578-abcb-a0c503b556d3\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.664272 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cff2733b-858c-4578-abcb-a0c503b556d3-server-conf\") pod \"cff2733b-858c-4578-abcb-a0c503b556d3\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.664315 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cff2733b-858c-4578-abcb-a0c503b556d3-rabbitmq-erlang-cookie\") pod \"cff2733b-858c-4578-abcb-a0c503b556d3\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 
09:00:20.664336 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cff2733b-858c-4578-abcb-a0c503b556d3-rabbitmq-tls\") pod \"cff2733b-858c-4578-abcb-a0c503b556d3\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.664382 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cff2733b-858c-4578-abcb-a0c503b556d3-rabbitmq-confd\") pod \"cff2733b-858c-4578-abcb-a0c503b556d3\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.664406 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cff2733b-858c-4578-abcb-a0c503b556d3-plugins-conf\") pod \"cff2733b-858c-4578-abcb-a0c503b556d3\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.664439 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cff2733b-858c-4578-abcb-a0c503b556d3-pod-info\") pod \"cff2733b-858c-4578-abcb-a0c503b556d3\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.664516 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cff2733b-858c-4578-abcb-a0c503b556d3-erlang-cookie-secret\") pod \"cff2733b-858c-4578-abcb-a0c503b556d3\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.664583 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"cff2733b-858c-4578-abcb-a0c503b556d3\" (UID: \"cff2733b-858c-4578-abcb-a0c503b556d3\") " Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.664670 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cff2733b-858c-4578-abcb-a0c503b556d3-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "cff2733b-858c-4578-abcb-a0c503b556d3" (UID: "cff2733b-858c-4578-abcb-a0c503b556d3"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.665742 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cff2733b-858c-4578-abcb-a0c503b556d3-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "cff2733b-858c-4578-abcb-a0c503b556d3" (UID: "cff2733b-858c-4578-abcb-a0c503b556d3"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.665865 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cff2733b-858c-4578-abcb-a0c503b556d3-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "cff2733b-858c-4578-abcb-a0c503b556d3" (UID: "cff2733b-858c-4578-abcb-a0c503b556d3"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.666162 4899 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cff2733b-858c-4578-abcb-a0c503b556d3-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.666185 4899 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cff2733b-858c-4578-abcb-a0c503b556d3-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.666196 4899 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cff2733b-858c-4578-abcb-a0c503b556d3-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.671086 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/cff2733b-858c-4578-abcb-a0c503b556d3-pod-info" (OuterVolumeSpecName: "pod-info") pod "cff2733b-858c-4578-abcb-a0c503b556d3" (UID: "cff2733b-858c-4578-abcb-a0c503b556d3"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.671120 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cff2733b-858c-4578-abcb-a0c503b556d3-kube-api-access-jx7bs" (OuterVolumeSpecName: "kube-api-access-jx7bs") pod "cff2733b-858c-4578-abcb-a0c503b556d3" (UID: "cff2733b-858c-4578-abcb-a0c503b556d3"). InnerVolumeSpecName "kube-api-access-jx7bs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.671110 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cff2733b-858c-4578-abcb-a0c503b556d3-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "cff2733b-858c-4578-abcb-a0c503b556d3" (UID: "cff2733b-858c-4578-abcb-a0c503b556d3"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.671154 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "persistence") pod "cff2733b-858c-4578-abcb-a0c503b556d3" (UID: "cff2733b-858c-4578-abcb-a0c503b556d3"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.672504 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cff2733b-858c-4578-abcb-a0c503b556d3-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "cff2733b-858c-4578-abcb-a0c503b556d3" (UID: "cff2733b-858c-4578-abcb-a0c503b556d3"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.693570 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cff2733b-858c-4578-abcb-a0c503b556d3-config-data" (OuterVolumeSpecName: "config-data") pod "cff2733b-858c-4578-abcb-a0c503b556d3" (UID: "cff2733b-858c-4578-abcb-a0c503b556d3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.712625 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cff2733b-858c-4578-abcb-a0c503b556d3-server-conf" (OuterVolumeSpecName: "server-conf") pod "cff2733b-858c-4578-abcb-a0c503b556d3" (UID: "cff2733b-858c-4578-abcb-a0c503b556d3"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.724369 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.768278 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cff2733b-858c-4578-abcb-a0c503b556d3-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.768331 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jx7bs\" (UniqueName: \"kubernetes.io/projected/cff2733b-858c-4578-abcb-a0c503b556d3-kube-api-access-jx7bs\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.768344 4899 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cff2733b-858c-4578-abcb-a0c503b556d3-server-conf\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.768352 4899 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cff2733b-858c-4578-abcb-a0c503b556d3-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.768363 4899 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cff2733b-858c-4578-abcb-a0c503b556d3-pod-info\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.768385 4899 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cff2733b-858c-4578-abcb-a0c503b556d3-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.768415 4899 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.777990 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cff2733b-858c-4578-abcb-a0c503b556d3-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "cff2733b-858c-4578-abcb-a0c503b556d3" (UID: "cff2733b-858c-4578-abcb-a0c503b556d3"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.791020 4899 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.830646 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7415d874-aa51-4fc4-8b40-b487392c248c","Type":"ContainerStarted","Data":"4965b29911cda2e61a2fcb6d3abe00417942c51498e1050593334392fac8571f"} Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.835357 4899 generic.go:334] "Generic (PLEG): container finished" podID="cff2733b-858c-4578-abcb-a0c503b556d3" containerID="db44beff9f0057405792026074513811d0b482b82b223a76e2fae8a18ac6215d" exitCode=0 Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.835434 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.835460 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cff2733b-858c-4578-abcb-a0c503b556d3","Type":"ContainerDied","Data":"db44beff9f0057405792026074513811d0b482b82b223a76e2fae8a18ac6215d"} Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.835784 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cff2733b-858c-4578-abcb-a0c503b556d3","Type":"ContainerDied","Data":"7860aa71d0506d14566487d585a11b18d34ca470279e1e5ccd56971a85ab844c"} Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.835806 4899 scope.go:117] "RemoveContainer" containerID="db44beff9f0057405792026074513811d0b482b82b223a76e2fae8a18ac6215d" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.862943 4899 scope.go:117] "RemoveContainer" containerID="ee34ae64a9b2b48f629d7d07e1e2606394e7e159388889c38dbf46c3b11f31cc" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.870350 4899 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cff2733b-858c-4578-abcb-a0c503b556d3-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.870386 4899 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.885746 4899 scope.go:117] "RemoveContainer" containerID="db44beff9f0057405792026074513811d0b482b82b223a76e2fae8a18ac6215d" Oct 03 09:00:20 crc kubenswrapper[4899]: E1003 09:00:20.886873 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db44beff9f0057405792026074513811d0b482b82b223a76e2fae8a18ac6215d\": container with ID starting with db44beff9f0057405792026074513811d0b482b82b223a76e2fae8a18ac6215d not found: ID does not exist" containerID="db44beff9f0057405792026074513811d0b482b82b223a76e2fae8a18ac6215d" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.886942 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db44beff9f0057405792026074513811d0b482b82b223a76e2fae8a18ac6215d"} err="failed to get container status \"db44beff9f0057405792026074513811d0b482b82b223a76e2fae8a18ac6215d\": rpc error: code = NotFound desc = could not find container 
\"db44beff9f0057405792026074513811d0b482b82b223a76e2fae8a18ac6215d\": container with ID starting with db44beff9f0057405792026074513811d0b482b82b223a76e2fae8a18ac6215d not found: ID does not exist" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.886978 4899 scope.go:117] "RemoveContainer" containerID="ee34ae64a9b2b48f629d7d07e1e2606394e7e159388889c38dbf46c3b11f31cc" Oct 03 09:00:20 crc kubenswrapper[4899]: E1003 09:00:20.887454 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee34ae64a9b2b48f629d7d07e1e2606394e7e159388889c38dbf46c3b11f31cc\": container with ID starting with ee34ae64a9b2b48f629d7d07e1e2606394e7e159388889c38dbf46c3b11f31cc not found: ID does not exist" containerID="ee34ae64a9b2b48f629d7d07e1e2606394e7e159388889c38dbf46c3b11f31cc" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.887499 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee34ae64a9b2b48f629d7d07e1e2606394e7e159388889c38dbf46c3b11f31cc"} err="failed to get container status \"ee34ae64a9b2b48f629d7d07e1e2606394e7e159388889c38dbf46c3b11f31cc\": rpc error: code = NotFound desc = could not find container \"ee34ae64a9b2b48f629d7d07e1e2606394e7e159388889c38dbf46c3b11f31cc\": container with ID starting with ee34ae64a9b2b48f629d7d07e1e2606394e7e159388889c38dbf46c3b11f31cc not found: ID does not exist" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.894309 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.911667 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.919609 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 09:00:20 crc kubenswrapper[4899]: E1003 09:00:20.920153 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cff2733b-858c-4578-abcb-a0c503b556d3" containerName="setup-container" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.920843 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="cff2733b-858c-4578-abcb-a0c503b556d3" containerName="setup-container" Oct 03 09:00:20 crc kubenswrapper[4899]: E1003 09:00:20.921018 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cff2733b-858c-4578-abcb-a0c503b556d3" containerName="rabbitmq" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.921035 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="cff2733b-858c-4578-abcb-a0c503b556d3" containerName="rabbitmq" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.921481 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="cff2733b-858c-4578-abcb-a0c503b556d3" containerName="rabbitmq" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.922938 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.926731 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.927052 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.927220 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.927284 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.927365 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-54tdh" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.930955 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.931122 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 03 09:00:20 crc kubenswrapper[4899]: I1003 09:00:20.931240 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.073790 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/75734f37-27af-4b79-ac28-4546a092e218-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.073860 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/75734f37-27af-4b79-ac28-4546a092e218-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.074018 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/75734f37-27af-4b79-ac28-4546a092e218-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.074068 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/75734f37-27af-4b79-ac28-4546a092e218-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.074133 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/75734f37-27af-4b79-ac28-4546a092e218-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.074165 4899 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/75734f37-27af-4b79-ac28-4546a092e218-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.074185 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/75734f37-27af-4b79-ac28-4546a092e218-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.074239 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/75734f37-27af-4b79-ac28-4546a092e218-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.074314 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqn6g\" (UniqueName: \"kubernetes.io/projected/75734f37-27af-4b79-ac28-4546a092e218-kube-api-access-wqn6g\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.074502 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.074569 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/75734f37-27af-4b79-ac28-4546a092e218-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.176748 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.176841 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/75734f37-27af-4b79-ac28-4546a092e218-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.176942 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/75734f37-27af-4b79-ac28-4546a092e218-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.176992 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" 
(UniqueName: \"kubernetes.io/secret/75734f37-27af-4b79-ac28-4546a092e218-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.177036 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/75734f37-27af-4b79-ac28-4546a092e218-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.177049 4899 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.177063 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/75734f37-27af-4b79-ac28-4546a092e218-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.177286 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/75734f37-27af-4b79-ac28-4546a092e218-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.177327 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/75734f37-27af-4b79-ac28-4546a092e218-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.177349 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/75734f37-27af-4b79-ac28-4546a092e218-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.177384 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/75734f37-27af-4b79-ac28-4546a092e218-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.177437 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqn6g\" (UniqueName: \"kubernetes.io/projected/75734f37-27af-4b79-ac28-4546a092e218-kube-api-access-wqn6g\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.178124 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/75734f37-27af-4b79-ac28-4546a092e218-rabbitmq-erlang-cookie\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.178153 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/75734f37-27af-4b79-ac28-4546a092e218-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.178422 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/75734f37-27af-4b79-ac28-4546a092e218-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.182076 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/75734f37-27af-4b79-ac28-4546a092e218-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.182608 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/75734f37-27af-4b79-ac28-4546a092e218-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.185405 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/75734f37-27af-4b79-ac28-4546a092e218-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.190133 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/75734f37-27af-4b79-ac28-4546a092e218-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.192868 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/75734f37-27af-4b79-ac28-4546a092e218-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.192909 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/75734f37-27af-4b79-ac28-4546a092e218-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.196758 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqn6g\" (UniqueName: \"kubernetes.io/projected/75734f37-27af-4b79-ac28-4546a092e218-kube-api-access-wqn6g\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.240827 4899 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"75734f37-27af-4b79-ac28-4546a092e218\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.272054 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.694025 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 09:00:21 crc kubenswrapper[4899]: W1003 09:00:21.744607 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod75734f37_27af_4b79_ac28_4546a092e218.slice/crio-61840b3e6507f4f0c0b20c6be551b1e764a72b7932dc85a4e71ab46e5664caef WatchSource:0}: Error finding container 61840b3e6507f4f0c0b20c6be551b1e764a72b7932dc85a4e71ab46e5664caef: Status 404 returned error can't find the container with id 61840b3e6507f4f0c0b20c6be551b1e764a72b7932dc85a4e71ab46e5664caef Oct 03 09:00:21 crc kubenswrapper[4899]: I1003 09:00:21.848089 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"75734f37-27af-4b79-ac28-4546a092e218","Type":"ContainerStarted","Data":"61840b3e6507f4f0c0b20c6be551b1e764a72b7932dc85a4e71ab46e5664caef"} Oct 03 09:00:22 crc kubenswrapper[4899]: I1003 09:00:22.537763 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cff2733b-858c-4578-abcb-a0c503b556d3" path="/var/lib/kubelet/pods/cff2733b-858c-4578-abcb-a0c503b556d3/volumes" Oct 03 09:00:22 crc kubenswrapper[4899]: I1003 09:00:22.692280 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-d558885bc-qjnvn"] Oct 03 09:00:22 crc kubenswrapper[4899]: I1003 09:00:22.693959 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d558885bc-qjnvn" Oct 03 09:00:22 crc kubenswrapper[4899]: I1003 09:00:22.696471 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Oct 03 09:00:22 crc kubenswrapper[4899]: I1003 09:00:22.706343 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d558885bc-qjnvn"] Oct 03 09:00:22 crc kubenswrapper[4899]: I1003 09:00:22.809944 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-dns-swift-storage-0\") pod \"dnsmasq-dns-d558885bc-qjnvn\" (UID: \"490471fb-1af8-457c-a8aa-67aa902005f7\") " pod="openstack/dnsmasq-dns-d558885bc-qjnvn" Oct 03 09:00:22 crc kubenswrapper[4899]: I1003 09:00:22.810004 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-openstack-edpm-ipam\") pod \"dnsmasq-dns-d558885bc-qjnvn\" (UID: \"490471fb-1af8-457c-a8aa-67aa902005f7\") " pod="openstack/dnsmasq-dns-d558885bc-qjnvn" Oct 03 09:00:22 crc kubenswrapper[4899]: I1003 09:00:22.810062 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-ovsdbserver-nb\") pod \"dnsmasq-dns-d558885bc-qjnvn\" (UID: \"490471fb-1af8-457c-a8aa-67aa902005f7\") " pod="openstack/dnsmasq-dns-d558885bc-qjnvn" Oct 03 09:00:22 crc kubenswrapper[4899]: I1003 09:00:22.810092 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbs4v\" (UniqueName: \"kubernetes.io/projected/490471fb-1af8-457c-a8aa-67aa902005f7-kube-api-access-jbs4v\") pod \"dnsmasq-dns-d558885bc-qjnvn\" (UID: \"490471fb-1af8-457c-a8aa-67aa902005f7\") " pod="openstack/dnsmasq-dns-d558885bc-qjnvn" Oct 03 09:00:22 crc kubenswrapper[4899]: I1003 09:00:22.810205 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-config\") pod \"dnsmasq-dns-d558885bc-qjnvn\" (UID: \"490471fb-1af8-457c-a8aa-67aa902005f7\") " pod="openstack/dnsmasq-dns-d558885bc-qjnvn" Oct 03 09:00:22 crc kubenswrapper[4899]: I1003 09:00:22.810224 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-ovsdbserver-sb\") pod \"dnsmasq-dns-d558885bc-qjnvn\" (UID: \"490471fb-1af8-457c-a8aa-67aa902005f7\") " pod="openstack/dnsmasq-dns-d558885bc-qjnvn" Oct 03 09:00:22 crc kubenswrapper[4899]: I1003 09:00:22.810398 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-dns-svc\") pod \"dnsmasq-dns-d558885bc-qjnvn\" (UID: \"490471fb-1af8-457c-a8aa-67aa902005f7\") " pod="openstack/dnsmasq-dns-d558885bc-qjnvn" Oct 03 09:00:22 crc kubenswrapper[4899]: I1003 09:00:22.858625 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7415d874-aa51-4fc4-8b40-b487392c248c","Type":"ContainerStarted","Data":"db856a16aa6f52a18a1c1259923f581d8bca30fec12b2996e90cee5c3e6c88d7"} Oct 
03 09:00:22 crc kubenswrapper[4899]: I1003 09:00:22.911929 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-ovsdbserver-nb\") pod \"dnsmasq-dns-d558885bc-qjnvn\" (UID: \"490471fb-1af8-457c-a8aa-67aa902005f7\") " pod="openstack/dnsmasq-dns-d558885bc-qjnvn" Oct 03 09:00:22 crc kubenswrapper[4899]: I1003 09:00:22.911977 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbs4v\" (UniqueName: \"kubernetes.io/projected/490471fb-1af8-457c-a8aa-67aa902005f7-kube-api-access-jbs4v\") pod \"dnsmasq-dns-d558885bc-qjnvn\" (UID: \"490471fb-1af8-457c-a8aa-67aa902005f7\") " pod="openstack/dnsmasq-dns-d558885bc-qjnvn" Oct 03 09:00:22 crc kubenswrapper[4899]: I1003 09:00:22.912118 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-config\") pod \"dnsmasq-dns-d558885bc-qjnvn\" (UID: \"490471fb-1af8-457c-a8aa-67aa902005f7\") " pod="openstack/dnsmasq-dns-d558885bc-qjnvn" Oct 03 09:00:22 crc kubenswrapper[4899]: I1003 09:00:22.912134 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-ovsdbserver-sb\") pod \"dnsmasq-dns-d558885bc-qjnvn\" (UID: \"490471fb-1af8-457c-a8aa-67aa902005f7\") " pod="openstack/dnsmasq-dns-d558885bc-qjnvn" Oct 03 09:00:22 crc kubenswrapper[4899]: I1003 09:00:22.912910 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-ovsdbserver-nb\") pod \"dnsmasq-dns-d558885bc-qjnvn\" (UID: \"490471fb-1af8-457c-a8aa-67aa902005f7\") " pod="openstack/dnsmasq-dns-d558885bc-qjnvn" Oct 03 09:00:22 crc kubenswrapper[4899]: I1003 09:00:22.913202 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-config\") pod \"dnsmasq-dns-d558885bc-qjnvn\" (UID: \"490471fb-1af8-457c-a8aa-67aa902005f7\") " pod="openstack/dnsmasq-dns-d558885bc-qjnvn" Oct 03 09:00:22 crc kubenswrapper[4899]: I1003 09:00:22.913243 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-ovsdbserver-sb\") pod \"dnsmasq-dns-d558885bc-qjnvn\" (UID: \"490471fb-1af8-457c-a8aa-67aa902005f7\") " pod="openstack/dnsmasq-dns-d558885bc-qjnvn" Oct 03 09:00:22 crc kubenswrapper[4899]: I1003 09:00:22.913378 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-dns-svc\") pod \"dnsmasq-dns-d558885bc-qjnvn\" (UID: \"490471fb-1af8-457c-a8aa-67aa902005f7\") " pod="openstack/dnsmasq-dns-d558885bc-qjnvn" Oct 03 09:00:22 crc kubenswrapper[4899]: I1003 09:00:22.913492 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-dns-swift-storage-0\") pod \"dnsmasq-dns-d558885bc-qjnvn\" (UID: \"490471fb-1af8-457c-a8aa-67aa902005f7\") " pod="openstack/dnsmasq-dns-d558885bc-qjnvn" Oct 03 09:00:22 crc kubenswrapper[4899]: I1003 09:00:22.913531 4899 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-openstack-edpm-ipam\") pod \"dnsmasq-dns-d558885bc-qjnvn\" (UID: \"490471fb-1af8-457c-a8aa-67aa902005f7\") " pod="openstack/dnsmasq-dns-d558885bc-qjnvn" Oct 03 09:00:22 crc kubenswrapper[4899]: I1003 09:00:22.914158 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-dns-swift-storage-0\") pod \"dnsmasq-dns-d558885bc-qjnvn\" (UID: \"490471fb-1af8-457c-a8aa-67aa902005f7\") " pod="openstack/dnsmasq-dns-d558885bc-qjnvn" Oct 03 09:00:22 crc kubenswrapper[4899]: I1003 09:00:22.914225 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-dns-svc\") pod \"dnsmasq-dns-d558885bc-qjnvn\" (UID: \"490471fb-1af8-457c-a8aa-67aa902005f7\") " pod="openstack/dnsmasq-dns-d558885bc-qjnvn" Oct 03 09:00:22 crc kubenswrapper[4899]: I1003 09:00:22.916141 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-openstack-edpm-ipam\") pod \"dnsmasq-dns-d558885bc-qjnvn\" (UID: \"490471fb-1af8-457c-a8aa-67aa902005f7\") " pod="openstack/dnsmasq-dns-d558885bc-qjnvn" Oct 03 09:00:22 crc kubenswrapper[4899]: I1003 09:00:22.931280 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbs4v\" (UniqueName: \"kubernetes.io/projected/490471fb-1af8-457c-a8aa-67aa902005f7-kube-api-access-jbs4v\") pod \"dnsmasq-dns-d558885bc-qjnvn\" (UID: \"490471fb-1af8-457c-a8aa-67aa902005f7\") " pod="openstack/dnsmasq-dns-d558885bc-qjnvn" Oct 03 09:00:23 crc kubenswrapper[4899]: I1003 09:00:23.021779 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d558885bc-qjnvn" Oct 03 09:00:23 crc kubenswrapper[4899]: I1003 09:00:23.441760 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d558885bc-qjnvn"] Oct 03 09:00:23 crc kubenswrapper[4899]: W1003 09:00:23.442377 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod490471fb_1af8_457c_a8aa_67aa902005f7.slice/crio-d6e4c703fb808ed14a422cc6d5626372af84aa0aa2c5ab6c0be92f1453d0f924 WatchSource:0}: Error finding container d6e4c703fb808ed14a422cc6d5626372af84aa0aa2c5ab6c0be92f1453d0f924: Status 404 returned error can't find the container with id d6e4c703fb808ed14a422cc6d5626372af84aa0aa2c5ab6c0be92f1453d0f924 Oct 03 09:00:23 crc kubenswrapper[4899]: I1003 09:00:23.866918 4899 generic.go:334] "Generic (PLEG): container finished" podID="490471fb-1af8-457c-a8aa-67aa902005f7" containerID="249b9abd3e10b9af900e57a4b89ebcc334bf187c58edd8c26044f5eed7ef0152" exitCode=0 Oct 03 09:00:23 crc kubenswrapper[4899]: I1003 09:00:23.867039 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d558885bc-qjnvn" event={"ID":"490471fb-1af8-457c-a8aa-67aa902005f7","Type":"ContainerDied","Data":"249b9abd3e10b9af900e57a4b89ebcc334bf187c58edd8c26044f5eed7ef0152"} Oct 03 09:00:23 crc kubenswrapper[4899]: I1003 09:00:23.867283 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d558885bc-qjnvn" event={"ID":"490471fb-1af8-457c-a8aa-67aa902005f7","Type":"ContainerStarted","Data":"d6e4c703fb808ed14a422cc6d5626372af84aa0aa2c5ab6c0be92f1453d0f924"} Oct 03 09:00:23 crc kubenswrapper[4899]: I1003 09:00:23.870948 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"75734f37-27af-4b79-ac28-4546a092e218","Type":"ContainerStarted","Data":"83628776cd3ca34977d075f1f0d07b51293aa81f91f4635e7908d05f6eacbd28"} Oct 03 09:00:24 crc kubenswrapper[4899]: I1003 09:00:24.888737 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d558885bc-qjnvn" event={"ID":"490471fb-1af8-457c-a8aa-67aa902005f7","Type":"ContainerStarted","Data":"4ca484f27e4d5703da414612522abfd54c77de66289e7837c5a3a37cebc92bac"} Oct 03 09:00:24 crc kubenswrapper[4899]: I1003 09:00:24.889030 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-d558885bc-qjnvn" Oct 03 09:00:24 crc kubenswrapper[4899]: I1003 09:00:24.917556 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-d558885bc-qjnvn" podStartSLOduration=2.917534979 podStartE2EDuration="2.917534979s" podCreationTimestamp="2025-10-03 09:00:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 09:00:24.903862627 +0000 UTC m=+1199.011347600" watchObservedRunningTime="2025-10-03 09:00:24.917534979 +0000 UTC m=+1199.025019932" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.023719 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-d558885bc-qjnvn" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.081145 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-4v2wj"] Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.081415 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" 
podUID="b8f5f08a-6776-49c8-8638-4225b9f222ab" containerName="dnsmasq-dns" containerID="cri-o://8e1af9d5ac5582fd108df81837412bf44847d6388424a15e7c8c4da11cf10682" gracePeriod=10 Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.213016 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78c64bc9c5-hbbsx"] Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.215105 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.241382 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78c64bc9c5-hbbsx"] Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.326996 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zgtz\" (UniqueName: \"kubernetes.io/projected/b0cbf4db-6115-4cb0-8aa1-b773b07e37e4-kube-api-access-6zgtz\") pod \"dnsmasq-dns-78c64bc9c5-hbbsx\" (UID: \"b0cbf4db-6115-4cb0-8aa1-b773b07e37e4\") " pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.327076 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b0cbf4db-6115-4cb0-8aa1-b773b07e37e4-dns-svc\") pod \"dnsmasq-dns-78c64bc9c5-hbbsx\" (UID: \"b0cbf4db-6115-4cb0-8aa1-b773b07e37e4\") " pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.327102 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0cbf4db-6115-4cb0-8aa1-b773b07e37e4-config\") pod \"dnsmasq-dns-78c64bc9c5-hbbsx\" (UID: \"b0cbf4db-6115-4cb0-8aa1-b773b07e37e4\") " pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.327153 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b0cbf4db-6115-4cb0-8aa1-b773b07e37e4-ovsdbserver-nb\") pod \"dnsmasq-dns-78c64bc9c5-hbbsx\" (UID: \"b0cbf4db-6115-4cb0-8aa1-b773b07e37e4\") " pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.327196 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/b0cbf4db-6115-4cb0-8aa1-b773b07e37e4-openstack-edpm-ipam\") pod \"dnsmasq-dns-78c64bc9c5-hbbsx\" (UID: \"b0cbf4db-6115-4cb0-8aa1-b773b07e37e4\") " pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.327219 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b0cbf4db-6115-4cb0-8aa1-b773b07e37e4-dns-swift-storage-0\") pod \"dnsmasq-dns-78c64bc9c5-hbbsx\" (UID: \"b0cbf4db-6115-4cb0-8aa1-b773b07e37e4\") " pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.327237 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b0cbf4db-6115-4cb0-8aa1-b773b07e37e4-ovsdbserver-sb\") pod \"dnsmasq-dns-78c64bc9c5-hbbsx\" (UID: \"b0cbf4db-6115-4cb0-8aa1-b773b07e37e4\") " pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" Oct 03 
09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.429163 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/b0cbf4db-6115-4cb0-8aa1-b773b07e37e4-openstack-edpm-ipam\") pod \"dnsmasq-dns-78c64bc9c5-hbbsx\" (UID: \"b0cbf4db-6115-4cb0-8aa1-b773b07e37e4\") " pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.429212 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b0cbf4db-6115-4cb0-8aa1-b773b07e37e4-dns-swift-storage-0\") pod \"dnsmasq-dns-78c64bc9c5-hbbsx\" (UID: \"b0cbf4db-6115-4cb0-8aa1-b773b07e37e4\") " pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.430039 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b0cbf4db-6115-4cb0-8aa1-b773b07e37e4-ovsdbserver-sb\") pod \"dnsmasq-dns-78c64bc9c5-hbbsx\" (UID: \"b0cbf4db-6115-4cb0-8aa1-b773b07e37e4\") " pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.429982 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b0cbf4db-6115-4cb0-8aa1-b773b07e37e4-dns-swift-storage-0\") pod \"dnsmasq-dns-78c64bc9c5-hbbsx\" (UID: \"b0cbf4db-6115-4cb0-8aa1-b773b07e37e4\") " pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.430213 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/b0cbf4db-6115-4cb0-8aa1-b773b07e37e4-openstack-edpm-ipam\") pod \"dnsmasq-dns-78c64bc9c5-hbbsx\" (UID: \"b0cbf4db-6115-4cb0-8aa1-b773b07e37e4\") " pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.430662 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b0cbf4db-6115-4cb0-8aa1-b773b07e37e4-ovsdbserver-sb\") pod \"dnsmasq-dns-78c64bc9c5-hbbsx\" (UID: \"b0cbf4db-6115-4cb0-8aa1-b773b07e37e4\") " pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.430766 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zgtz\" (UniqueName: \"kubernetes.io/projected/b0cbf4db-6115-4cb0-8aa1-b773b07e37e4-kube-api-access-6zgtz\") pod \"dnsmasq-dns-78c64bc9c5-hbbsx\" (UID: \"b0cbf4db-6115-4cb0-8aa1-b773b07e37e4\") " pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.430922 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b0cbf4db-6115-4cb0-8aa1-b773b07e37e4-dns-svc\") pod \"dnsmasq-dns-78c64bc9c5-hbbsx\" (UID: \"b0cbf4db-6115-4cb0-8aa1-b773b07e37e4\") " pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.430960 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0cbf4db-6115-4cb0-8aa1-b773b07e37e4-config\") pod \"dnsmasq-dns-78c64bc9c5-hbbsx\" (UID: \"b0cbf4db-6115-4cb0-8aa1-b773b07e37e4\") " pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 
09:00:33.431074 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b0cbf4db-6115-4cb0-8aa1-b773b07e37e4-ovsdbserver-nb\") pod \"dnsmasq-dns-78c64bc9c5-hbbsx\" (UID: \"b0cbf4db-6115-4cb0-8aa1-b773b07e37e4\") " pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.431531 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b0cbf4db-6115-4cb0-8aa1-b773b07e37e4-dns-svc\") pod \"dnsmasq-dns-78c64bc9c5-hbbsx\" (UID: \"b0cbf4db-6115-4cb0-8aa1-b773b07e37e4\") " pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.431735 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0cbf4db-6115-4cb0-8aa1-b773b07e37e4-config\") pod \"dnsmasq-dns-78c64bc9c5-hbbsx\" (UID: \"b0cbf4db-6115-4cb0-8aa1-b773b07e37e4\") " pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.431877 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b0cbf4db-6115-4cb0-8aa1-b773b07e37e4-ovsdbserver-nb\") pod \"dnsmasq-dns-78c64bc9c5-hbbsx\" (UID: \"b0cbf4db-6115-4cb0-8aa1-b773b07e37e4\") " pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.455159 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zgtz\" (UniqueName: \"kubernetes.io/projected/b0cbf4db-6115-4cb0-8aa1-b773b07e37e4-kube-api-access-6zgtz\") pod \"dnsmasq-dns-78c64bc9c5-hbbsx\" (UID: \"b0cbf4db-6115-4cb0-8aa1-b773b07e37e4\") " pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.589361 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.715272 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.737968 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-dns-swift-storage-0\") pod \"b8f5f08a-6776-49c8-8638-4225b9f222ab\" (UID: \"b8f5f08a-6776-49c8-8638-4225b9f222ab\") " Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.738777 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4tn54\" (UniqueName: \"kubernetes.io/projected/b8f5f08a-6776-49c8-8638-4225b9f222ab-kube-api-access-4tn54\") pod \"b8f5f08a-6776-49c8-8638-4225b9f222ab\" (UID: \"b8f5f08a-6776-49c8-8638-4225b9f222ab\") " Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.738974 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-dns-svc\") pod \"b8f5f08a-6776-49c8-8638-4225b9f222ab\" (UID: \"b8f5f08a-6776-49c8-8638-4225b9f222ab\") " Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.739049 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-ovsdbserver-nb\") pod \"b8f5f08a-6776-49c8-8638-4225b9f222ab\" (UID: \"b8f5f08a-6776-49c8-8638-4225b9f222ab\") " Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.739103 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-config\") pod \"b8f5f08a-6776-49c8-8638-4225b9f222ab\" (UID: \"b8f5f08a-6776-49c8-8638-4225b9f222ab\") " Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.739166 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-ovsdbserver-sb\") pod \"b8f5f08a-6776-49c8-8638-4225b9f222ab\" (UID: \"b8f5f08a-6776-49c8-8638-4225b9f222ab\") " Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.750077 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8f5f08a-6776-49c8-8638-4225b9f222ab-kube-api-access-4tn54" (OuterVolumeSpecName: "kube-api-access-4tn54") pod "b8f5f08a-6776-49c8-8638-4225b9f222ab" (UID: "b8f5f08a-6776-49c8-8638-4225b9f222ab"). InnerVolumeSpecName "kube-api-access-4tn54". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.822754 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "b8f5f08a-6776-49c8-8638-4225b9f222ab" (UID: "b8f5f08a-6776-49c8-8638-4225b9f222ab"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.824534 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b8f5f08a-6776-49c8-8638-4225b9f222ab" (UID: "b8f5f08a-6776-49c8-8638-4225b9f222ab"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.825672 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b8f5f08a-6776-49c8-8638-4225b9f222ab" (UID: "b8f5f08a-6776-49c8-8638-4225b9f222ab"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.834834 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-config" (OuterVolumeSpecName: "config") pod "b8f5f08a-6776-49c8-8638-4225b9f222ab" (UID: "b8f5f08a-6776-49c8-8638-4225b9f222ab"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.841954 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-config\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.841989 4899 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.842008 4899 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.842020 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4tn54\" (UniqueName: \"kubernetes.io/projected/b8f5f08a-6776-49c8-8638-4225b9f222ab-kube-api-access-4tn54\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.842031 4899 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.861500 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b8f5f08a-6776-49c8-8638-4225b9f222ab" (UID: "b8f5f08a-6776-49c8-8638-4225b9f222ab"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.944293 4899 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b8f5f08a-6776-49c8-8638-4225b9f222ab-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.963922 4899 generic.go:334] "Generic (PLEG): container finished" podID="b8f5f08a-6776-49c8-8638-4225b9f222ab" containerID="8e1af9d5ac5582fd108df81837412bf44847d6388424a15e7c8c4da11cf10682" exitCode=0 Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.963977 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.963978 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" event={"ID":"b8f5f08a-6776-49c8-8638-4225b9f222ab","Type":"ContainerDied","Data":"8e1af9d5ac5582fd108df81837412bf44847d6388424a15e7c8c4da11cf10682"} Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.964141 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-4v2wj" event={"ID":"b8f5f08a-6776-49c8-8638-4225b9f222ab","Type":"ContainerDied","Data":"cd1dda4790b1a954c812646ce2846638972f2ce31d3fedb5d95a909634e3d5f8"} Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.964169 4899 scope.go:117] "RemoveContainer" containerID="8e1af9d5ac5582fd108df81837412bf44847d6388424a15e7c8c4da11cf10682" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.994334 4899 scope.go:117] "RemoveContainer" containerID="f51be7daad81c7f0cdc0e8215173d7fcef4d0d16b746151e5884c62d564b0fd2" Oct 03 09:00:33 crc kubenswrapper[4899]: I1003 09:00:33.995175 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-4v2wj"] Oct 03 09:00:34 crc kubenswrapper[4899]: I1003 09:00:34.007403 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-4v2wj"] Oct 03 09:00:34 crc kubenswrapper[4899]: I1003 09:00:34.018596 4899 scope.go:117] "RemoveContainer" containerID="8e1af9d5ac5582fd108df81837412bf44847d6388424a15e7c8c4da11cf10682" Oct 03 09:00:34 crc kubenswrapper[4899]: E1003 09:00:34.020424 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e1af9d5ac5582fd108df81837412bf44847d6388424a15e7c8c4da11cf10682\": container with ID starting with 8e1af9d5ac5582fd108df81837412bf44847d6388424a15e7c8c4da11cf10682 not found: ID does not exist" containerID="8e1af9d5ac5582fd108df81837412bf44847d6388424a15e7c8c4da11cf10682" Oct 03 09:00:34 crc kubenswrapper[4899]: I1003 09:00:34.020455 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e1af9d5ac5582fd108df81837412bf44847d6388424a15e7c8c4da11cf10682"} err="failed to get container status \"8e1af9d5ac5582fd108df81837412bf44847d6388424a15e7c8c4da11cf10682\": rpc error: code = NotFound desc = could not find container \"8e1af9d5ac5582fd108df81837412bf44847d6388424a15e7c8c4da11cf10682\": container with ID starting with 8e1af9d5ac5582fd108df81837412bf44847d6388424a15e7c8c4da11cf10682 not found: ID does not exist" Oct 03 09:00:34 crc kubenswrapper[4899]: I1003 09:00:34.020475 4899 scope.go:117] "RemoveContainer" containerID="f51be7daad81c7f0cdc0e8215173d7fcef4d0d16b746151e5884c62d564b0fd2" Oct 03 09:00:34 crc kubenswrapper[4899]: E1003 09:00:34.020842 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f51be7daad81c7f0cdc0e8215173d7fcef4d0d16b746151e5884c62d564b0fd2\": container with ID starting with f51be7daad81c7f0cdc0e8215173d7fcef4d0d16b746151e5884c62d564b0fd2 not found: ID does not exist" containerID="f51be7daad81c7f0cdc0e8215173d7fcef4d0d16b746151e5884c62d564b0fd2" Oct 03 09:00:34 crc kubenswrapper[4899]: I1003 09:00:34.020873 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f51be7daad81c7f0cdc0e8215173d7fcef4d0d16b746151e5884c62d564b0fd2"} err="failed to get container status 
\"f51be7daad81c7f0cdc0e8215173d7fcef4d0d16b746151e5884c62d564b0fd2\": rpc error: code = NotFound desc = could not find container \"f51be7daad81c7f0cdc0e8215173d7fcef4d0d16b746151e5884c62d564b0fd2\": container with ID starting with f51be7daad81c7f0cdc0e8215173d7fcef4d0d16b746151e5884c62d564b0fd2 not found: ID does not exist" Oct 03 09:00:34 crc kubenswrapper[4899]: I1003 09:00:34.069033 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78c64bc9c5-hbbsx"] Oct 03 09:00:34 crc kubenswrapper[4899]: W1003 09:00:34.072141 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb0cbf4db_6115_4cb0_8aa1_b773b07e37e4.slice/crio-99283d01b93dd3fbaa5404e320b57cd6c58e39f2b78920bcc56d038863b0a440 WatchSource:0}: Error finding container 99283d01b93dd3fbaa5404e320b57cd6c58e39f2b78920bcc56d038863b0a440: Status 404 returned error can't find the container with id 99283d01b93dd3fbaa5404e320b57cd6c58e39f2b78920bcc56d038863b0a440 Oct 03 09:00:34 crc kubenswrapper[4899]: I1003 09:00:34.538118 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8f5f08a-6776-49c8-8638-4225b9f222ab" path="/var/lib/kubelet/pods/b8f5f08a-6776-49c8-8638-4225b9f222ab/volumes" Oct 03 09:00:34 crc kubenswrapper[4899]: I1003 09:00:34.973081 4899 generic.go:334] "Generic (PLEG): container finished" podID="b0cbf4db-6115-4cb0-8aa1-b773b07e37e4" containerID="5a50cb5e1216489cb5d1fb9571c6176a390897b90cffb6d6447530e76defc7b7" exitCode=0 Oct 03 09:00:34 crc kubenswrapper[4899]: I1003 09:00:34.973134 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" event={"ID":"b0cbf4db-6115-4cb0-8aa1-b773b07e37e4","Type":"ContainerDied","Data":"5a50cb5e1216489cb5d1fb9571c6176a390897b90cffb6d6447530e76defc7b7"} Oct 03 09:00:34 crc kubenswrapper[4899]: I1003 09:00:34.973184 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" event={"ID":"b0cbf4db-6115-4cb0-8aa1-b773b07e37e4","Type":"ContainerStarted","Data":"99283d01b93dd3fbaa5404e320b57cd6c58e39f2b78920bcc56d038863b0a440"} Oct 03 09:00:35 crc kubenswrapper[4899]: I1003 09:00:35.985060 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" event={"ID":"b0cbf4db-6115-4cb0-8aa1-b773b07e37e4","Type":"ContainerStarted","Data":"dc56330a72b4226925237113e5fa6c3c60a31f9b30af69c2e611716bf048d3e2"} Oct 03 09:00:35 crc kubenswrapper[4899]: I1003 09:00:35.985366 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" Oct 03 09:00:36 crc kubenswrapper[4899]: I1003 09:00:36.010340 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" podStartSLOduration=3.010319193 podStartE2EDuration="3.010319193s" podCreationTimestamp="2025-10-03 09:00:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 09:00:36.001394061 +0000 UTC m=+1210.108879034" watchObservedRunningTime="2025-10-03 09:00:36.010319193 +0000 UTC m=+1210.117804146" Oct 03 09:00:43 crc kubenswrapper[4899]: I1003 09:00:43.591506 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-78c64bc9c5-hbbsx" Oct 03 09:00:43 crc kubenswrapper[4899]: I1003 09:00:43.648962 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/dnsmasq-dns-d558885bc-qjnvn"] Oct 03 09:00:43 crc kubenswrapper[4899]: I1003 09:00:43.649208 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-d558885bc-qjnvn" podUID="490471fb-1af8-457c-a8aa-67aa902005f7" containerName="dnsmasq-dns" containerID="cri-o://4ca484f27e4d5703da414612522abfd54c77de66289e7837c5a3a37cebc92bac" gracePeriod=10 Oct 03 09:00:44 crc kubenswrapper[4899]: I1003 09:00:44.072322 4899 generic.go:334] "Generic (PLEG): container finished" podID="490471fb-1af8-457c-a8aa-67aa902005f7" containerID="4ca484f27e4d5703da414612522abfd54c77de66289e7837c5a3a37cebc92bac" exitCode=0 Oct 03 09:00:44 crc kubenswrapper[4899]: I1003 09:00:44.072447 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d558885bc-qjnvn" event={"ID":"490471fb-1af8-457c-a8aa-67aa902005f7","Type":"ContainerDied","Data":"4ca484f27e4d5703da414612522abfd54c77de66289e7837c5a3a37cebc92bac"} Oct 03 09:00:44 crc kubenswrapper[4899]: I1003 09:00:44.266710 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d558885bc-qjnvn" Oct 03 09:00:44 crc kubenswrapper[4899]: I1003 09:00:44.321549 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-dns-svc\") pod \"490471fb-1af8-457c-a8aa-67aa902005f7\" (UID: \"490471fb-1af8-457c-a8aa-67aa902005f7\") " Oct 03 09:00:44 crc kubenswrapper[4899]: I1003 09:00:44.321610 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-dns-swift-storage-0\") pod \"490471fb-1af8-457c-a8aa-67aa902005f7\" (UID: \"490471fb-1af8-457c-a8aa-67aa902005f7\") " Oct 03 09:00:44 crc kubenswrapper[4899]: I1003 09:00:44.321645 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-ovsdbserver-nb\") pod \"490471fb-1af8-457c-a8aa-67aa902005f7\" (UID: \"490471fb-1af8-457c-a8aa-67aa902005f7\") " Oct 03 09:00:44 crc kubenswrapper[4899]: I1003 09:00:44.321716 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-ovsdbserver-sb\") pod \"490471fb-1af8-457c-a8aa-67aa902005f7\" (UID: \"490471fb-1af8-457c-a8aa-67aa902005f7\") " Oct 03 09:00:44 crc kubenswrapper[4899]: I1003 09:00:44.321749 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jbs4v\" (UniqueName: \"kubernetes.io/projected/490471fb-1af8-457c-a8aa-67aa902005f7-kube-api-access-jbs4v\") pod \"490471fb-1af8-457c-a8aa-67aa902005f7\" (UID: \"490471fb-1af8-457c-a8aa-67aa902005f7\") " Oct 03 09:00:44 crc kubenswrapper[4899]: I1003 09:00:44.321817 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-config\") pod \"490471fb-1af8-457c-a8aa-67aa902005f7\" (UID: \"490471fb-1af8-457c-a8aa-67aa902005f7\") " Oct 03 09:00:44 crc kubenswrapper[4899]: I1003 09:00:44.321844 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-openstack-edpm-ipam\") pod 
\"490471fb-1af8-457c-a8aa-67aa902005f7\" (UID: \"490471fb-1af8-457c-a8aa-67aa902005f7\") " Oct 03 09:00:44 crc kubenswrapper[4899]: I1003 09:00:44.343237 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/490471fb-1af8-457c-a8aa-67aa902005f7-kube-api-access-jbs4v" (OuterVolumeSpecName: "kube-api-access-jbs4v") pod "490471fb-1af8-457c-a8aa-67aa902005f7" (UID: "490471fb-1af8-457c-a8aa-67aa902005f7"). InnerVolumeSpecName "kube-api-access-jbs4v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:00:44 crc kubenswrapper[4899]: I1003 09:00:44.375349 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "490471fb-1af8-457c-a8aa-67aa902005f7" (UID: "490471fb-1af8-457c-a8aa-67aa902005f7"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 09:00:44 crc kubenswrapper[4899]: I1003 09:00:44.385216 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "490471fb-1af8-457c-a8aa-67aa902005f7" (UID: "490471fb-1af8-457c-a8aa-67aa902005f7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 09:00:44 crc kubenswrapper[4899]: I1003 09:00:44.386107 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "490471fb-1af8-457c-a8aa-67aa902005f7" (UID: "490471fb-1af8-457c-a8aa-67aa902005f7"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 09:00:44 crc kubenswrapper[4899]: I1003 09:00:44.399977 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-config" (OuterVolumeSpecName: "config") pod "490471fb-1af8-457c-a8aa-67aa902005f7" (UID: "490471fb-1af8-457c-a8aa-67aa902005f7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 09:00:44 crc kubenswrapper[4899]: I1003 09:00:44.401495 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "490471fb-1af8-457c-a8aa-67aa902005f7" (UID: "490471fb-1af8-457c-a8aa-67aa902005f7"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 09:00:44 crc kubenswrapper[4899]: I1003 09:00:44.401874 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "490471fb-1af8-457c-a8aa-67aa902005f7" (UID: "490471fb-1af8-457c-a8aa-67aa902005f7"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 09:00:44 crc kubenswrapper[4899]: I1003 09:00:44.423303 4899 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:44 crc kubenswrapper[4899]: I1003 09:00:44.423329 4899 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:44 crc kubenswrapper[4899]: I1003 09:00:44.423340 4899 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:44 crc kubenswrapper[4899]: I1003 09:00:44.423350 4899 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:44 crc kubenswrapper[4899]: I1003 09:00:44.423358 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jbs4v\" (UniqueName: \"kubernetes.io/projected/490471fb-1af8-457c-a8aa-67aa902005f7-kube-api-access-jbs4v\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:44 crc kubenswrapper[4899]: I1003 09:00:44.423367 4899 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-config\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:44 crc kubenswrapper[4899]: I1003 09:00:44.423376 4899 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/490471fb-1af8-457c-a8aa-67aa902005f7-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 03 09:00:45 crc kubenswrapper[4899]: I1003 09:00:45.121060 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d558885bc-qjnvn" event={"ID":"490471fb-1af8-457c-a8aa-67aa902005f7","Type":"ContainerDied","Data":"d6e4c703fb808ed14a422cc6d5626372af84aa0aa2c5ab6c0be92f1453d0f924"} Oct 03 09:00:45 crc kubenswrapper[4899]: I1003 09:00:45.121447 4899 scope.go:117] "RemoveContainer" containerID="4ca484f27e4d5703da414612522abfd54c77de66289e7837c5a3a37cebc92bac" Oct 03 09:00:45 crc kubenswrapper[4899]: I1003 09:00:45.121657 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d558885bc-qjnvn" Oct 03 09:00:45 crc kubenswrapper[4899]: I1003 09:00:45.143201 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d558885bc-qjnvn"] Oct 03 09:00:45 crc kubenswrapper[4899]: I1003 09:00:45.146507 4899 scope.go:117] "RemoveContainer" containerID="249b9abd3e10b9af900e57a4b89ebcc334bf187c58edd8c26044f5eed7ef0152" Oct 03 09:00:45 crc kubenswrapper[4899]: I1003 09:00:45.151103 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-d558885bc-qjnvn"] Oct 03 09:00:46 crc kubenswrapper[4899]: I1003 09:00:46.537133 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="490471fb-1af8-457c-a8aa-67aa902005f7" path="/var/lib/kubelet/pods/490471fb-1af8-457c-a8aa-67aa902005f7/volumes" Oct 03 09:00:55 crc kubenswrapper[4899]: I1003 09:00:55.205651 4899 generic.go:334] "Generic (PLEG): container finished" podID="7415d874-aa51-4fc4-8b40-b487392c248c" containerID="db856a16aa6f52a18a1c1259923f581d8bca30fec12b2996e90cee5c3e6c88d7" exitCode=0 Oct 03 09:00:55 crc kubenswrapper[4899]: I1003 09:00:55.205746 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7415d874-aa51-4fc4-8b40-b487392c248c","Type":"ContainerDied","Data":"db856a16aa6f52a18a1c1259923f581d8bca30fec12b2996e90cee5c3e6c88d7"} Oct 03 09:00:56 crc kubenswrapper[4899]: I1003 09:00:56.214921 4899 generic.go:334] "Generic (PLEG): container finished" podID="75734f37-27af-4b79-ac28-4546a092e218" containerID="83628776cd3ca34977d075f1f0d07b51293aa81f91f4635e7908d05f6eacbd28" exitCode=0 Oct 03 09:00:56 crc kubenswrapper[4899]: I1003 09:00:56.215010 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"75734f37-27af-4b79-ac28-4546a092e218","Type":"ContainerDied","Data":"83628776cd3ca34977d075f1f0d07b51293aa81f91f4635e7908d05f6eacbd28"} Oct 03 09:00:56 crc kubenswrapper[4899]: I1003 09:00:56.218173 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7415d874-aa51-4fc4-8b40-b487392c248c","Type":"ContainerStarted","Data":"bbbe4d3ac7fdd579ad7443c2ea9cbe2bfc0dbfe8280f63a0275c28f38c3a3ada"} Oct 03 09:00:56 crc kubenswrapper[4899]: I1003 09:00:56.218402 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 03 09:00:56 crc kubenswrapper[4899]: I1003 09:00:56.269524 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.269507501 podStartE2EDuration="37.269507501s" podCreationTimestamp="2025-10-03 09:00:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 09:00:56.265548945 +0000 UTC m=+1230.373033918" watchObservedRunningTime="2025-10-03 09:00:56.269507501 +0000 UTC m=+1230.376992454" Oct 03 09:00:56 crc kubenswrapper[4899]: I1003 09:00:56.906038 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp"] Oct 03 09:00:56 crc kubenswrapper[4899]: E1003 09:00:56.906651 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8f5f08a-6776-49c8-8638-4225b9f222ab" containerName="dnsmasq-dns" Oct 03 09:00:56 crc kubenswrapper[4899]: I1003 09:00:56.906669 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8f5f08a-6776-49c8-8638-4225b9f222ab" containerName="dnsmasq-dns" Oct 
03 09:00:56 crc kubenswrapper[4899]: E1003 09:00:56.906681 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="490471fb-1af8-457c-a8aa-67aa902005f7" containerName="init" Oct 03 09:00:56 crc kubenswrapper[4899]: I1003 09:00:56.906687 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="490471fb-1af8-457c-a8aa-67aa902005f7" containerName="init" Oct 03 09:00:56 crc kubenswrapper[4899]: E1003 09:00:56.906706 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8f5f08a-6776-49c8-8638-4225b9f222ab" containerName="init" Oct 03 09:00:56 crc kubenswrapper[4899]: I1003 09:00:56.906712 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8f5f08a-6776-49c8-8638-4225b9f222ab" containerName="init" Oct 03 09:00:56 crc kubenswrapper[4899]: E1003 09:00:56.906739 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="490471fb-1af8-457c-a8aa-67aa902005f7" containerName="dnsmasq-dns" Oct 03 09:00:56 crc kubenswrapper[4899]: I1003 09:00:56.906745 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="490471fb-1af8-457c-a8aa-67aa902005f7" containerName="dnsmasq-dns" Oct 03 09:00:56 crc kubenswrapper[4899]: I1003 09:00:56.906930 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="490471fb-1af8-457c-a8aa-67aa902005f7" containerName="dnsmasq-dns" Oct 03 09:00:56 crc kubenswrapper[4899]: I1003 09:00:56.906962 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8f5f08a-6776-49c8-8638-4225b9f222ab" containerName="dnsmasq-dns" Oct 03 09:00:56 crc kubenswrapper[4899]: I1003 09:00:56.907524 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp" Oct 03 09:00:56 crc kubenswrapper[4899]: I1003 09:00:56.913361 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 09:00:56 crc kubenswrapper[4899]: I1003 09:00:56.913693 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 09:00:56 crc kubenswrapper[4899]: I1003 09:00:56.917179 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pnmjv" Oct 03 09:00:56 crc kubenswrapper[4899]: I1003 09:00:56.917297 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 09:00:56 crc kubenswrapper[4899]: I1003 09:00:56.924673 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp"] Oct 03 09:00:57 crc kubenswrapper[4899]: I1003 09:00:57.072427 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5ae39e5-0bf8-4627-8e97-7162c0861524-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp\" (UID: \"b5ae39e5-0bf8-4627-8e97-7162c0861524\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp" Oct 03 09:00:57 crc kubenswrapper[4899]: I1003 09:00:57.072504 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfb74\" (UniqueName: \"kubernetes.io/projected/b5ae39e5-0bf8-4627-8e97-7162c0861524-kube-api-access-qfb74\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp\" (UID: \"b5ae39e5-0bf8-4627-8e97-7162c0861524\") " 
pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp" Oct 03 09:00:57 crc kubenswrapper[4899]: I1003 09:00:57.072555 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b5ae39e5-0bf8-4627-8e97-7162c0861524-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp\" (UID: \"b5ae39e5-0bf8-4627-8e97-7162c0861524\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp" Oct 03 09:00:57 crc kubenswrapper[4899]: I1003 09:00:57.072623 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b5ae39e5-0bf8-4627-8e97-7162c0861524-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp\" (UID: \"b5ae39e5-0bf8-4627-8e97-7162c0861524\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp" Oct 03 09:00:57 crc kubenswrapper[4899]: I1003 09:00:57.174151 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5ae39e5-0bf8-4627-8e97-7162c0861524-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp\" (UID: \"b5ae39e5-0bf8-4627-8e97-7162c0861524\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp" Oct 03 09:00:57 crc kubenswrapper[4899]: I1003 09:00:57.174229 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfb74\" (UniqueName: \"kubernetes.io/projected/b5ae39e5-0bf8-4627-8e97-7162c0861524-kube-api-access-qfb74\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp\" (UID: \"b5ae39e5-0bf8-4627-8e97-7162c0861524\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp" Oct 03 09:00:57 crc kubenswrapper[4899]: I1003 09:00:57.174275 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b5ae39e5-0bf8-4627-8e97-7162c0861524-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp\" (UID: \"b5ae39e5-0bf8-4627-8e97-7162c0861524\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp" Oct 03 09:00:57 crc kubenswrapper[4899]: I1003 09:00:57.174310 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b5ae39e5-0bf8-4627-8e97-7162c0861524-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp\" (UID: \"b5ae39e5-0bf8-4627-8e97-7162c0861524\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp" Oct 03 09:00:57 crc kubenswrapper[4899]: I1003 09:00:57.180378 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b5ae39e5-0bf8-4627-8e97-7162c0861524-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp\" (UID: \"b5ae39e5-0bf8-4627-8e97-7162c0861524\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp" Oct 03 09:00:57 crc kubenswrapper[4899]: I1003 09:00:57.180396 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5ae39e5-0bf8-4627-8e97-7162c0861524-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp\" (UID: \"b5ae39e5-0bf8-4627-8e97-7162c0861524\") " 
pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp" Oct 03 09:00:57 crc kubenswrapper[4899]: I1003 09:00:57.191796 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b5ae39e5-0bf8-4627-8e97-7162c0861524-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp\" (UID: \"b5ae39e5-0bf8-4627-8e97-7162c0861524\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp" Oct 03 09:00:57 crc kubenswrapper[4899]: I1003 09:00:57.195636 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfb74\" (UniqueName: \"kubernetes.io/projected/b5ae39e5-0bf8-4627-8e97-7162c0861524-kube-api-access-qfb74\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp\" (UID: \"b5ae39e5-0bf8-4627-8e97-7162c0861524\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp" Oct 03 09:00:57 crc kubenswrapper[4899]: I1003 09:00:57.227269 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp" Oct 03 09:00:57 crc kubenswrapper[4899]: I1003 09:00:57.228930 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"75734f37-27af-4b79-ac28-4546a092e218","Type":"ContainerStarted","Data":"369af25aa0a4bcf336b21a32b5fe97470ceaec97a99b6d5b12df95e312fba5ba"} Oct 03 09:00:57 crc kubenswrapper[4899]: I1003 09:00:57.229290 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:00:57 crc kubenswrapper[4899]: I1003 09:00:57.252593 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.252573631 podStartE2EDuration="37.252573631s" podCreationTimestamp="2025-10-03 09:00:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 09:00:57.250717454 +0000 UTC m=+1231.358202417" watchObservedRunningTime="2025-10-03 09:00:57.252573631 +0000 UTC m=+1231.360058584" Oct 03 09:00:57 crc kubenswrapper[4899]: I1003 09:00:57.772133 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp"] Oct 03 09:00:57 crc kubenswrapper[4899]: I1003 09:00:57.777375 4899 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 09:00:58 crc kubenswrapper[4899]: I1003 09:00:58.239446 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp" event={"ID":"b5ae39e5-0bf8-4627-8e97-7162c0861524","Type":"ContainerStarted","Data":"77bf9eba2513c6a5134c41857600e306566f742288c698031ccc3e33a9f8414b"} Oct 03 09:01:00 crc kubenswrapper[4899]: I1003 09:01:00.143115 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29324701-75rw5"] Oct 03 09:01:00 crc kubenswrapper[4899]: I1003 09:01:00.145189 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29324701-75rw5" Oct 03 09:01:00 crc kubenswrapper[4899]: I1003 09:01:00.158614 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29324701-75rw5"] Oct 03 09:01:00 crc kubenswrapper[4899]: I1003 09:01:00.237005 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dad5826d-6e25-43af-9916-de4fd15faa3a-combined-ca-bundle\") pod \"keystone-cron-29324701-75rw5\" (UID: \"dad5826d-6e25-43af-9916-de4fd15faa3a\") " pod="openstack/keystone-cron-29324701-75rw5" Oct 03 09:01:00 crc kubenswrapper[4899]: I1003 09:01:00.237205 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dad5826d-6e25-43af-9916-de4fd15faa3a-config-data\") pod \"keystone-cron-29324701-75rw5\" (UID: \"dad5826d-6e25-43af-9916-de4fd15faa3a\") " pod="openstack/keystone-cron-29324701-75rw5" Oct 03 09:01:00 crc kubenswrapper[4899]: I1003 09:01:00.237239 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dad5826d-6e25-43af-9916-de4fd15faa3a-fernet-keys\") pod \"keystone-cron-29324701-75rw5\" (UID: \"dad5826d-6e25-43af-9916-de4fd15faa3a\") " pod="openstack/keystone-cron-29324701-75rw5" Oct 03 09:01:00 crc kubenswrapper[4899]: I1003 09:01:00.237289 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c46p9\" (UniqueName: \"kubernetes.io/projected/dad5826d-6e25-43af-9916-de4fd15faa3a-kube-api-access-c46p9\") pod \"keystone-cron-29324701-75rw5\" (UID: \"dad5826d-6e25-43af-9916-de4fd15faa3a\") " pod="openstack/keystone-cron-29324701-75rw5" Oct 03 09:01:00 crc kubenswrapper[4899]: I1003 09:01:00.338699 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dad5826d-6e25-43af-9916-de4fd15faa3a-config-data\") pod \"keystone-cron-29324701-75rw5\" (UID: \"dad5826d-6e25-43af-9916-de4fd15faa3a\") " pod="openstack/keystone-cron-29324701-75rw5" Oct 03 09:01:00 crc kubenswrapper[4899]: I1003 09:01:00.338746 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dad5826d-6e25-43af-9916-de4fd15faa3a-fernet-keys\") pod \"keystone-cron-29324701-75rw5\" (UID: \"dad5826d-6e25-43af-9916-de4fd15faa3a\") " pod="openstack/keystone-cron-29324701-75rw5" Oct 03 09:01:00 crc kubenswrapper[4899]: I1003 09:01:00.338788 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c46p9\" (UniqueName: \"kubernetes.io/projected/dad5826d-6e25-43af-9916-de4fd15faa3a-kube-api-access-c46p9\") pod \"keystone-cron-29324701-75rw5\" (UID: \"dad5826d-6e25-43af-9916-de4fd15faa3a\") " pod="openstack/keystone-cron-29324701-75rw5" Oct 03 09:01:00 crc kubenswrapper[4899]: I1003 09:01:00.338815 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dad5826d-6e25-43af-9916-de4fd15faa3a-combined-ca-bundle\") pod \"keystone-cron-29324701-75rw5\" (UID: \"dad5826d-6e25-43af-9916-de4fd15faa3a\") " pod="openstack/keystone-cron-29324701-75rw5" Oct 03 09:01:00 crc kubenswrapper[4899]: I1003 09:01:00.345570 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dad5826d-6e25-43af-9916-de4fd15faa3a-combined-ca-bundle\") pod \"keystone-cron-29324701-75rw5\" (UID: \"dad5826d-6e25-43af-9916-de4fd15faa3a\") " pod="openstack/keystone-cron-29324701-75rw5" Oct 03 09:01:00 crc kubenswrapper[4899]: I1003 09:01:00.345604 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dad5826d-6e25-43af-9916-de4fd15faa3a-config-data\") pod \"keystone-cron-29324701-75rw5\" (UID: \"dad5826d-6e25-43af-9916-de4fd15faa3a\") " pod="openstack/keystone-cron-29324701-75rw5" Oct 03 09:01:00 crc kubenswrapper[4899]: I1003 09:01:00.356324 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dad5826d-6e25-43af-9916-de4fd15faa3a-fernet-keys\") pod \"keystone-cron-29324701-75rw5\" (UID: \"dad5826d-6e25-43af-9916-de4fd15faa3a\") " pod="openstack/keystone-cron-29324701-75rw5" Oct 03 09:01:00 crc kubenswrapper[4899]: I1003 09:01:00.359127 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c46p9\" (UniqueName: \"kubernetes.io/projected/dad5826d-6e25-43af-9916-de4fd15faa3a-kube-api-access-c46p9\") pod \"keystone-cron-29324701-75rw5\" (UID: \"dad5826d-6e25-43af-9916-de4fd15faa3a\") " pod="openstack/keystone-cron-29324701-75rw5" Oct 03 09:01:00 crc kubenswrapper[4899]: I1003 09:01:00.473481 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29324701-75rw5" Oct 03 09:01:01 crc kubenswrapper[4899]: I1003 09:01:01.002465 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29324701-75rw5"] Oct 03 09:01:01 crc kubenswrapper[4899]: I1003 09:01:01.278482 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29324701-75rw5" event={"ID":"dad5826d-6e25-43af-9916-de4fd15faa3a","Type":"ContainerStarted","Data":"cf670e3cf8ae29f20dcfcd8843d2e386eb18d88687f2a2f7544cdc278817941c"} Oct 03 09:01:01 crc kubenswrapper[4899]: I1003 09:01:01.278822 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29324701-75rw5" event={"ID":"dad5826d-6e25-43af-9916-de4fd15faa3a","Type":"ContainerStarted","Data":"bb45b2dddd6019d7745af264a0b1297e4324938c46fe9d6e1273a9c1ac79da07"} Oct 03 09:01:01 crc kubenswrapper[4899]: I1003 09:01:01.306640 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29324701-75rw5" podStartSLOduration=1.306623928 podStartE2EDuration="1.306623928s" podCreationTimestamp="2025-10-03 09:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 09:01:01.295870439 +0000 UTC m=+1235.403355392" watchObservedRunningTime="2025-10-03 09:01:01.306623928 +0000 UTC m=+1235.414108881" Oct 03 09:01:03 crc kubenswrapper[4899]: I1003 09:01:03.300056 4899 generic.go:334] "Generic (PLEG): container finished" podID="dad5826d-6e25-43af-9916-de4fd15faa3a" containerID="cf670e3cf8ae29f20dcfcd8843d2e386eb18d88687f2a2f7544cdc278817941c" exitCode=0 Oct 03 09:01:03 crc kubenswrapper[4899]: I1003 09:01:03.300161 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29324701-75rw5" event={"ID":"dad5826d-6e25-43af-9916-de4fd15faa3a","Type":"ContainerDied","Data":"cf670e3cf8ae29f20dcfcd8843d2e386eb18d88687f2a2f7544cdc278817941c"} Oct 03 09:01:07 crc kubenswrapper[4899]: 
I1003 09:01:07.945425 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29324701-75rw5" Oct 03 09:01:08 crc kubenswrapper[4899]: I1003 09:01:08.140533 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dad5826d-6e25-43af-9916-de4fd15faa3a-config-data\") pod \"dad5826d-6e25-43af-9916-de4fd15faa3a\" (UID: \"dad5826d-6e25-43af-9916-de4fd15faa3a\") " Oct 03 09:01:08 crc kubenswrapper[4899]: I1003 09:01:08.140618 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c46p9\" (UniqueName: \"kubernetes.io/projected/dad5826d-6e25-43af-9916-de4fd15faa3a-kube-api-access-c46p9\") pod \"dad5826d-6e25-43af-9916-de4fd15faa3a\" (UID: \"dad5826d-6e25-43af-9916-de4fd15faa3a\") " Oct 03 09:01:08 crc kubenswrapper[4899]: I1003 09:01:08.140639 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dad5826d-6e25-43af-9916-de4fd15faa3a-fernet-keys\") pod \"dad5826d-6e25-43af-9916-de4fd15faa3a\" (UID: \"dad5826d-6e25-43af-9916-de4fd15faa3a\") " Oct 03 09:01:08 crc kubenswrapper[4899]: I1003 09:01:08.140676 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dad5826d-6e25-43af-9916-de4fd15faa3a-combined-ca-bundle\") pod \"dad5826d-6e25-43af-9916-de4fd15faa3a\" (UID: \"dad5826d-6e25-43af-9916-de4fd15faa3a\") " Oct 03 09:01:08 crc kubenswrapper[4899]: I1003 09:01:08.145050 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dad5826d-6e25-43af-9916-de4fd15faa3a-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "dad5826d-6e25-43af-9916-de4fd15faa3a" (UID: "dad5826d-6e25-43af-9916-de4fd15faa3a"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:01:08 crc kubenswrapper[4899]: I1003 09:01:08.159239 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dad5826d-6e25-43af-9916-de4fd15faa3a-kube-api-access-c46p9" (OuterVolumeSpecName: "kube-api-access-c46p9") pod "dad5826d-6e25-43af-9916-de4fd15faa3a" (UID: "dad5826d-6e25-43af-9916-de4fd15faa3a"). InnerVolumeSpecName "kube-api-access-c46p9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:01:08 crc kubenswrapper[4899]: I1003 09:01:08.166055 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dad5826d-6e25-43af-9916-de4fd15faa3a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dad5826d-6e25-43af-9916-de4fd15faa3a" (UID: "dad5826d-6e25-43af-9916-de4fd15faa3a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:01:08 crc kubenswrapper[4899]: I1003 09:01:08.188637 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dad5826d-6e25-43af-9916-de4fd15faa3a-config-data" (OuterVolumeSpecName: "config-data") pod "dad5826d-6e25-43af-9916-de4fd15faa3a" (UID: "dad5826d-6e25-43af-9916-de4fd15faa3a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:01:08 crc kubenswrapper[4899]: I1003 09:01:08.242763 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dad5826d-6e25-43af-9916-de4fd15faa3a-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 09:01:08 crc kubenswrapper[4899]: I1003 09:01:08.242794 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c46p9\" (UniqueName: \"kubernetes.io/projected/dad5826d-6e25-43af-9916-de4fd15faa3a-kube-api-access-c46p9\") on node \"crc\" DevicePath \"\"" Oct 03 09:01:08 crc kubenswrapper[4899]: I1003 09:01:08.242807 4899 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dad5826d-6e25-43af-9916-de4fd15faa3a-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 03 09:01:08 crc kubenswrapper[4899]: I1003 09:01:08.242817 4899 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dad5826d-6e25-43af-9916-de4fd15faa3a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 09:01:08 crc kubenswrapper[4899]: I1003 09:01:08.354117 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp" event={"ID":"b5ae39e5-0bf8-4627-8e97-7162c0861524","Type":"ContainerStarted","Data":"4478010e0772a568167ebe0b6ca6df21a92e116cf3148acc39ab11aff49e9301"} Oct 03 09:01:08 crc kubenswrapper[4899]: I1003 09:01:08.355740 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29324701-75rw5" event={"ID":"dad5826d-6e25-43af-9916-de4fd15faa3a","Type":"ContainerDied","Data":"bb45b2dddd6019d7745af264a0b1297e4324938c46fe9d6e1273a9c1ac79da07"} Oct 03 09:01:08 crc kubenswrapper[4899]: I1003 09:01:08.355768 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bb45b2dddd6019d7745af264a0b1297e4324938c46fe9d6e1273a9c1ac79da07" Oct 03 09:01:08 crc kubenswrapper[4899]: I1003 09:01:08.355813 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29324701-75rw5" Oct 03 09:01:08 crc kubenswrapper[4899]: I1003 09:01:08.375805 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp" podStartSLOduration=2.347963767 podStartE2EDuration="12.37578588s" podCreationTimestamp="2025-10-03 09:00:56 +0000 UTC" firstStartedPulling="2025-10-03 09:00:57.777085272 +0000 UTC m=+1231.884570225" lastFinishedPulling="2025-10-03 09:01:07.804907385 +0000 UTC m=+1241.912392338" observedRunningTime="2025-10-03 09:01:08.374384526 +0000 UTC m=+1242.481869499" watchObservedRunningTime="2025-10-03 09:01:08.37578588 +0000 UTC m=+1242.483270833" Oct 03 09:01:10 crc kubenswrapper[4899]: I1003 09:01:10.221114 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 03 09:01:11 crc kubenswrapper[4899]: I1003 09:01:11.275068 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 03 09:01:19 crc kubenswrapper[4899]: I1003 09:01:19.463684 4899 generic.go:334] "Generic (PLEG): container finished" podID="b5ae39e5-0bf8-4627-8e97-7162c0861524" containerID="4478010e0772a568167ebe0b6ca6df21a92e116cf3148acc39ab11aff49e9301" exitCode=0 Oct 03 09:01:19 crc kubenswrapper[4899]: I1003 09:01:19.463766 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp" event={"ID":"b5ae39e5-0bf8-4627-8e97-7162c0861524","Type":"ContainerDied","Data":"4478010e0772a568167ebe0b6ca6df21a92e116cf3148acc39ab11aff49e9301"} Oct 03 09:01:20 crc kubenswrapper[4899]: I1003 09:01:20.845268 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp" Oct 03 09:01:20 crc kubenswrapper[4899]: I1003 09:01:20.984077 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfb74\" (UniqueName: \"kubernetes.io/projected/b5ae39e5-0bf8-4627-8e97-7162c0861524-kube-api-access-qfb74\") pod \"b5ae39e5-0bf8-4627-8e97-7162c0861524\" (UID: \"b5ae39e5-0bf8-4627-8e97-7162c0861524\") " Oct 03 09:01:20 crc kubenswrapper[4899]: I1003 09:01:20.984224 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b5ae39e5-0bf8-4627-8e97-7162c0861524-ssh-key\") pod \"b5ae39e5-0bf8-4627-8e97-7162c0861524\" (UID: \"b5ae39e5-0bf8-4627-8e97-7162c0861524\") " Oct 03 09:01:20 crc kubenswrapper[4899]: I1003 09:01:20.984242 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b5ae39e5-0bf8-4627-8e97-7162c0861524-inventory\") pod \"b5ae39e5-0bf8-4627-8e97-7162c0861524\" (UID: \"b5ae39e5-0bf8-4627-8e97-7162c0861524\") " Oct 03 09:01:20 crc kubenswrapper[4899]: I1003 09:01:20.984263 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5ae39e5-0bf8-4627-8e97-7162c0861524-repo-setup-combined-ca-bundle\") pod \"b5ae39e5-0bf8-4627-8e97-7162c0861524\" (UID: \"b5ae39e5-0bf8-4627-8e97-7162c0861524\") " Oct 03 09:01:20 crc kubenswrapper[4899]: I1003 09:01:20.990140 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5ae39e5-0bf8-4627-8e97-7162c0861524-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "b5ae39e5-0bf8-4627-8e97-7162c0861524" (UID: "b5ae39e5-0bf8-4627-8e97-7162c0861524"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:01:20 crc kubenswrapper[4899]: I1003 09:01:20.990366 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5ae39e5-0bf8-4627-8e97-7162c0861524-kube-api-access-qfb74" (OuterVolumeSpecName: "kube-api-access-qfb74") pod "b5ae39e5-0bf8-4627-8e97-7162c0861524" (UID: "b5ae39e5-0bf8-4627-8e97-7162c0861524"). InnerVolumeSpecName "kube-api-access-qfb74". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.012292 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5ae39e5-0bf8-4627-8e97-7162c0861524-inventory" (OuterVolumeSpecName: "inventory") pod "b5ae39e5-0bf8-4627-8e97-7162c0861524" (UID: "b5ae39e5-0bf8-4627-8e97-7162c0861524"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.012481 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5ae39e5-0bf8-4627-8e97-7162c0861524-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b5ae39e5-0bf8-4627-8e97-7162c0861524" (UID: "b5ae39e5-0bf8-4627-8e97-7162c0861524"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.086680 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfb74\" (UniqueName: \"kubernetes.io/projected/b5ae39e5-0bf8-4627-8e97-7162c0861524-kube-api-access-qfb74\") on node \"crc\" DevicePath \"\"" Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.087094 4899 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b5ae39e5-0bf8-4627-8e97-7162c0861524-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.087108 4899 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b5ae39e5-0bf8-4627-8e97-7162c0861524-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.087120 4899 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5ae39e5-0bf8-4627-8e97-7162c0861524-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.480047 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp" event={"ID":"b5ae39e5-0bf8-4627-8e97-7162c0861524","Type":"ContainerDied","Data":"77bf9eba2513c6a5134c41857600e306566f742288c698031ccc3e33a9f8414b"} Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.480091 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="77bf9eba2513c6a5134c41857600e306566f742288c698031ccc3e33a9f8414b" Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.480098 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp" Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.552684 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-9rwqd"] Oct 03 09:01:21 crc kubenswrapper[4899]: E1003 09:01:21.553231 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dad5826d-6e25-43af-9916-de4fd15faa3a" containerName="keystone-cron" Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.553283 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="dad5826d-6e25-43af-9916-de4fd15faa3a" containerName="keystone-cron" Oct 03 09:01:21 crc kubenswrapper[4899]: E1003 09:01:21.553309 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5ae39e5-0bf8-4627-8e97-7162c0861524" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.553317 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5ae39e5-0bf8-4627-8e97-7162c0861524" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.553553 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="dad5826d-6e25-43af-9916-de4fd15faa3a" containerName="keystone-cron" Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.553592 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5ae39e5-0bf8-4627-8e97-7162c0861524" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.555132 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9rwqd" Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.559921 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.560282 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.560434 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.560551 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pnmjv" Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.565248 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-9rwqd"] Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.596725 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f5dcd890-e7fa-4739-919b-6b1b77ff741a-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-9rwqd\" (UID: \"f5dcd890-e7fa-4739-919b-6b1b77ff741a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9rwqd" Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.596773 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f5dcd890-e7fa-4739-919b-6b1b77ff741a-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-9rwqd\" (UID: \"f5dcd890-e7fa-4739-919b-6b1b77ff741a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9rwqd" Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.596821 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hj8s\" (UniqueName: \"kubernetes.io/projected/f5dcd890-e7fa-4739-919b-6b1b77ff741a-kube-api-access-4hj8s\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-9rwqd\" (UID: \"f5dcd890-e7fa-4739-919b-6b1b77ff741a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9rwqd" Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.698214 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f5dcd890-e7fa-4739-919b-6b1b77ff741a-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-9rwqd\" (UID: \"f5dcd890-e7fa-4739-919b-6b1b77ff741a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9rwqd" Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.698316 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f5dcd890-e7fa-4739-919b-6b1b77ff741a-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-9rwqd\" (UID: \"f5dcd890-e7fa-4739-919b-6b1b77ff741a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9rwqd" Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.698465 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hj8s\" (UniqueName: \"kubernetes.io/projected/f5dcd890-e7fa-4739-919b-6b1b77ff741a-kube-api-access-4hj8s\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-9rwqd\" (UID: \"f5dcd890-e7fa-4739-919b-6b1b77ff741a\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9rwqd" Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.703495 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f5dcd890-e7fa-4739-919b-6b1b77ff741a-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-9rwqd\" (UID: \"f5dcd890-e7fa-4739-919b-6b1b77ff741a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9rwqd" Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.710182 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f5dcd890-e7fa-4739-919b-6b1b77ff741a-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-9rwqd\" (UID: \"f5dcd890-e7fa-4739-919b-6b1b77ff741a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9rwqd" Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.717498 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hj8s\" (UniqueName: \"kubernetes.io/projected/f5dcd890-e7fa-4739-919b-6b1b77ff741a-kube-api-access-4hj8s\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-9rwqd\" (UID: \"f5dcd890-e7fa-4739-919b-6b1b77ff741a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9rwqd" Oct 03 09:01:21 crc kubenswrapper[4899]: I1003 09:01:21.885564 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9rwqd" Oct 03 09:01:22 crc kubenswrapper[4899]: I1003 09:01:22.357307 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-9rwqd"] Oct 03 09:01:22 crc kubenswrapper[4899]: I1003 09:01:22.490114 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9rwqd" event={"ID":"f5dcd890-e7fa-4739-919b-6b1b77ff741a","Type":"ContainerStarted","Data":"8ab0b66090aff9359299135f28bad154f3f01db1031467739b638ca6d99d208d"} Oct 03 09:01:23 crc kubenswrapper[4899]: I1003 09:01:23.504421 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9rwqd" event={"ID":"f5dcd890-e7fa-4739-919b-6b1b77ff741a","Type":"ContainerStarted","Data":"7a9f0b1045cf3a2e77e85213be486963d2676aa1edb6d27b2b367bec3993e233"} Oct 03 09:01:23 crc kubenswrapper[4899]: I1003 09:01:23.530410 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9rwqd" podStartSLOduration=1.736722469 podStartE2EDuration="2.530389575s" podCreationTimestamp="2025-10-03 09:01:21 +0000 UTC" firstStartedPulling="2025-10-03 09:01:22.362677036 +0000 UTC m=+1256.470161989" lastFinishedPulling="2025-10-03 09:01:23.156344132 +0000 UTC m=+1257.263829095" observedRunningTime="2025-10-03 09:01:23.517846739 +0000 UTC m=+1257.625331692" watchObservedRunningTime="2025-10-03 09:01:23.530389575 +0000 UTC m=+1257.637874518" Oct 03 09:01:26 crc kubenswrapper[4899]: I1003 09:01:26.541175 4899 generic.go:334] "Generic (PLEG): container finished" podID="f5dcd890-e7fa-4739-919b-6b1b77ff741a" containerID="7a9f0b1045cf3a2e77e85213be486963d2676aa1edb6d27b2b367bec3993e233" exitCode=0 Oct 03 09:01:26 crc kubenswrapper[4899]: I1003 09:01:26.544007 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9rwqd" 
event={"ID":"f5dcd890-e7fa-4739-919b-6b1b77ff741a","Type":"ContainerDied","Data":"7a9f0b1045cf3a2e77e85213be486963d2676aa1edb6d27b2b367bec3993e233"} Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.031990 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9rwqd" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.212334 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4hj8s\" (UniqueName: \"kubernetes.io/projected/f5dcd890-e7fa-4739-919b-6b1b77ff741a-kube-api-access-4hj8s\") pod \"f5dcd890-e7fa-4739-919b-6b1b77ff741a\" (UID: \"f5dcd890-e7fa-4739-919b-6b1b77ff741a\") " Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.212419 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f5dcd890-e7fa-4739-919b-6b1b77ff741a-inventory\") pod \"f5dcd890-e7fa-4739-919b-6b1b77ff741a\" (UID: \"f5dcd890-e7fa-4739-919b-6b1b77ff741a\") " Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.212658 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f5dcd890-e7fa-4739-919b-6b1b77ff741a-ssh-key\") pod \"f5dcd890-e7fa-4739-919b-6b1b77ff741a\" (UID: \"f5dcd890-e7fa-4739-919b-6b1b77ff741a\") " Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.217344 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5dcd890-e7fa-4739-919b-6b1b77ff741a-kube-api-access-4hj8s" (OuterVolumeSpecName: "kube-api-access-4hj8s") pod "f5dcd890-e7fa-4739-919b-6b1b77ff741a" (UID: "f5dcd890-e7fa-4739-919b-6b1b77ff741a"). InnerVolumeSpecName "kube-api-access-4hj8s". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.240874 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5dcd890-e7fa-4739-919b-6b1b77ff741a-inventory" (OuterVolumeSpecName: "inventory") pod "f5dcd890-e7fa-4739-919b-6b1b77ff741a" (UID: "f5dcd890-e7fa-4739-919b-6b1b77ff741a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.243281 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5dcd890-e7fa-4739-919b-6b1b77ff741a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f5dcd890-e7fa-4739-919b-6b1b77ff741a" (UID: "f5dcd890-e7fa-4739-919b-6b1b77ff741a"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.314216 4899 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f5dcd890-e7fa-4739-919b-6b1b77ff741a-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.314253 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4hj8s\" (UniqueName: \"kubernetes.io/projected/f5dcd890-e7fa-4739-919b-6b1b77ff741a-kube-api-access-4hj8s\") on node \"crc\" DevicePath \"\"" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.314288 4899 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f5dcd890-e7fa-4739-919b-6b1b77ff741a-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.558138 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9rwqd" event={"ID":"f5dcd890-e7fa-4739-919b-6b1b77ff741a","Type":"ContainerDied","Data":"8ab0b66090aff9359299135f28bad154f3f01db1031467739b638ca6d99d208d"} Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.558181 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9rwqd" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.558183 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ab0b66090aff9359299135f28bad154f3f01db1031467739b638ca6d99d208d" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.620442 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb"] Oct 03 09:01:28 crc kubenswrapper[4899]: E1003 09:01:28.620783 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5dcd890-e7fa-4739-919b-6b1b77ff741a" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.620801 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5dcd890-e7fa-4739-919b-6b1b77ff741a" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.621042 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5dcd890-e7fa-4739-919b-6b1b77ff741a" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.621945 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.624401 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.624487 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pnmjv" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.624507 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.625737 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.637105 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb"] Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.723032 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89rr9\" (UniqueName: \"kubernetes.io/projected/9ad20d0d-637a-49d9-8a83-bfae0d7c2a37-kube-api-access-89rr9\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb\" (UID: \"9ad20d0d-637a-49d9-8a83-bfae0d7c2a37\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.723147 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9ad20d0d-637a-49d9-8a83-bfae0d7c2a37-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb\" (UID: \"9ad20d0d-637a-49d9-8a83-bfae0d7c2a37\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.723208 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9ad20d0d-637a-49d9-8a83-bfae0d7c2a37-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb\" (UID: \"9ad20d0d-637a-49d9-8a83-bfae0d7c2a37\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.723404 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ad20d0d-637a-49d9-8a83-bfae0d7c2a37-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb\" (UID: \"9ad20d0d-637a-49d9-8a83-bfae0d7c2a37\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.824106 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89rr9\" (UniqueName: \"kubernetes.io/projected/9ad20d0d-637a-49d9-8a83-bfae0d7c2a37-kube-api-access-89rr9\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb\" (UID: \"9ad20d0d-637a-49d9-8a83-bfae0d7c2a37\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.824159 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9ad20d0d-637a-49d9-8a83-bfae0d7c2a37-ssh-key\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb\" (UID: \"9ad20d0d-637a-49d9-8a83-bfae0d7c2a37\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.824186 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9ad20d0d-637a-49d9-8a83-bfae0d7c2a37-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb\" (UID: \"9ad20d0d-637a-49d9-8a83-bfae0d7c2a37\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.824296 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ad20d0d-637a-49d9-8a83-bfae0d7c2a37-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb\" (UID: \"9ad20d0d-637a-49d9-8a83-bfae0d7c2a37\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.827832 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9ad20d0d-637a-49d9-8a83-bfae0d7c2a37-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb\" (UID: \"9ad20d0d-637a-49d9-8a83-bfae0d7c2a37\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.827848 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9ad20d0d-637a-49d9-8a83-bfae0d7c2a37-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb\" (UID: \"9ad20d0d-637a-49d9-8a83-bfae0d7c2a37\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.829605 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ad20d0d-637a-49d9-8a83-bfae0d7c2a37-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb\" (UID: \"9ad20d0d-637a-49d9-8a83-bfae0d7c2a37\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.842982 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89rr9\" (UniqueName: \"kubernetes.io/projected/9ad20d0d-637a-49d9-8a83-bfae0d7c2a37-kube-api-access-89rr9\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb\" (UID: \"9ad20d0d-637a-49d9-8a83-bfae0d7c2a37\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb" Oct 03 09:01:28 crc kubenswrapper[4899]: I1003 09:01:28.941458 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb" Oct 03 09:01:29 crc kubenswrapper[4899]: I1003 09:01:29.436261 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb"] Oct 03 09:01:29 crc kubenswrapper[4899]: I1003 09:01:29.567065 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb" event={"ID":"9ad20d0d-637a-49d9-8a83-bfae0d7c2a37","Type":"ContainerStarted","Data":"4105f3b4a54c08517b8e5d4fa08d7e7a6525ae912f824967bf45c92e4f5ab148"} Oct 03 09:01:30 crc kubenswrapper[4899]: I1003 09:01:30.578669 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb" event={"ID":"9ad20d0d-637a-49d9-8a83-bfae0d7c2a37","Type":"ContainerStarted","Data":"fe8201fbb8672ca9e1221ff63409aeb74c6c0a28ecc101fd061b5ff5f82c8620"} Oct 03 09:01:30 crc kubenswrapper[4899]: I1003 09:01:30.603665 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb" podStartSLOduration=2.080947601 podStartE2EDuration="2.603644906s" podCreationTimestamp="2025-10-03 09:01:28 +0000 UTC" firstStartedPulling="2025-10-03 09:01:29.443486366 +0000 UTC m=+1263.550971309" lastFinishedPulling="2025-10-03 09:01:29.966183661 +0000 UTC m=+1264.073668614" observedRunningTime="2025-10-03 09:01:30.595825409 +0000 UTC m=+1264.703310392" watchObservedRunningTime="2025-10-03 09:01:30.603644906 +0000 UTC m=+1264.711129849" Oct 03 09:01:42 crc kubenswrapper[4899]: I1003 09:01:42.198073 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 09:01:42 crc kubenswrapper[4899]: I1003 09:01:42.198587 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 09:01:47 crc kubenswrapper[4899]: I1003 09:01:47.707004 4899 scope.go:117] "RemoveContainer" containerID="938b832fd628929ec85ba42f8de6d491b92c6d3c49108bc2195100c9bc0ddd91" Oct 03 09:02:12 crc kubenswrapper[4899]: I1003 09:02:12.197757 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 09:02:12 crc kubenswrapper[4899]: I1003 09:02:12.198359 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 09:02:42 crc kubenswrapper[4899]: I1003 09:02:42.198770 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 09:02:42 crc kubenswrapper[4899]: I1003 09:02:42.200086 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 09:02:42 crc kubenswrapper[4899]: I1003 09:02:42.200173 4899 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 09:02:42 crc kubenswrapper[4899]: I1003 09:02:42.201066 4899 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"def1a55ad00f7229a571d38a4260404a613179a181d031abe241127ae4349c21"} pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 09:02:42 crc kubenswrapper[4899]: I1003 09:02:42.201140 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" containerID="cri-o://def1a55ad00f7229a571d38a4260404a613179a181d031abe241127ae4349c21" gracePeriod=600 Oct 03 09:02:43 crc kubenswrapper[4899]: I1003 09:02:43.183103 4899 generic.go:334] "Generic (PLEG): container finished" podID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerID="def1a55ad00f7229a571d38a4260404a613179a181d031abe241127ae4349c21" exitCode=0 Oct 03 09:02:43 crc kubenswrapper[4899]: I1003 09:02:43.183715 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerDied","Data":"def1a55ad00f7229a571d38a4260404a613179a181d031abe241127ae4349c21"} Oct 03 09:02:43 crc kubenswrapper[4899]: I1003 09:02:43.183764 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerStarted","Data":"7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b"} Oct 03 09:02:43 crc kubenswrapper[4899]: I1003 09:02:43.183786 4899 scope.go:117] "RemoveContainer" containerID="890db85d4fbdd365302c6e5ed34afe49d3195769246d9b9d458ffa94cd16c5f8" Oct 03 09:02:47 crc kubenswrapper[4899]: I1003 09:02:47.784552 4899 scope.go:117] "RemoveContainer" containerID="ffa3895e7d7efddf5d57593af3042344fd7f1f73bbc572ece7d300f48722c796" Oct 03 09:04:05 crc kubenswrapper[4899]: I1003 09:04:05.586188 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-clsrv"] Oct 03 09:04:05 crc kubenswrapper[4899]: I1003 09:04:05.589026 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-clsrv" Oct 03 09:04:05 crc kubenswrapper[4899]: I1003 09:04:05.604499 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-clsrv"] Oct 03 09:04:05 crc kubenswrapper[4899]: I1003 09:04:05.727878 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cddv4\" (UniqueName: \"kubernetes.io/projected/7156e6ac-fc7e-4e76-a177-285e1ef7fe51-kube-api-access-cddv4\") pod \"certified-operators-clsrv\" (UID: \"7156e6ac-fc7e-4e76-a177-285e1ef7fe51\") " pod="openshift-marketplace/certified-operators-clsrv" Oct 03 09:04:05 crc kubenswrapper[4899]: I1003 09:04:05.727937 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7156e6ac-fc7e-4e76-a177-285e1ef7fe51-catalog-content\") pod \"certified-operators-clsrv\" (UID: \"7156e6ac-fc7e-4e76-a177-285e1ef7fe51\") " pod="openshift-marketplace/certified-operators-clsrv" Oct 03 09:04:05 crc kubenswrapper[4899]: I1003 09:04:05.728041 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7156e6ac-fc7e-4e76-a177-285e1ef7fe51-utilities\") pod \"certified-operators-clsrv\" (UID: \"7156e6ac-fc7e-4e76-a177-285e1ef7fe51\") " pod="openshift-marketplace/certified-operators-clsrv" Oct 03 09:04:05 crc kubenswrapper[4899]: I1003 09:04:05.829779 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7156e6ac-fc7e-4e76-a177-285e1ef7fe51-catalog-content\") pod \"certified-operators-clsrv\" (UID: \"7156e6ac-fc7e-4e76-a177-285e1ef7fe51\") " pod="openshift-marketplace/certified-operators-clsrv" Oct 03 09:04:05 crc kubenswrapper[4899]: I1003 09:04:05.829835 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cddv4\" (UniqueName: \"kubernetes.io/projected/7156e6ac-fc7e-4e76-a177-285e1ef7fe51-kube-api-access-cddv4\") pod \"certified-operators-clsrv\" (UID: \"7156e6ac-fc7e-4e76-a177-285e1ef7fe51\") " pod="openshift-marketplace/certified-operators-clsrv" Oct 03 09:04:05 crc kubenswrapper[4899]: I1003 09:04:05.829998 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7156e6ac-fc7e-4e76-a177-285e1ef7fe51-utilities\") pod \"certified-operators-clsrv\" (UID: \"7156e6ac-fc7e-4e76-a177-285e1ef7fe51\") " pod="openshift-marketplace/certified-operators-clsrv" Oct 03 09:04:05 crc kubenswrapper[4899]: I1003 09:04:05.830592 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7156e6ac-fc7e-4e76-a177-285e1ef7fe51-utilities\") pod \"certified-operators-clsrv\" (UID: \"7156e6ac-fc7e-4e76-a177-285e1ef7fe51\") " pod="openshift-marketplace/certified-operators-clsrv" Oct 03 09:04:05 crc kubenswrapper[4899]: I1003 09:04:05.830908 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7156e6ac-fc7e-4e76-a177-285e1ef7fe51-catalog-content\") pod \"certified-operators-clsrv\" (UID: \"7156e6ac-fc7e-4e76-a177-285e1ef7fe51\") " pod="openshift-marketplace/certified-operators-clsrv" Oct 03 09:04:05 crc kubenswrapper[4899]: I1003 09:04:05.865255 4899 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-cddv4\" (UniqueName: \"kubernetes.io/projected/7156e6ac-fc7e-4e76-a177-285e1ef7fe51-kube-api-access-cddv4\") pod \"certified-operators-clsrv\" (UID: \"7156e6ac-fc7e-4e76-a177-285e1ef7fe51\") " pod="openshift-marketplace/certified-operators-clsrv" Oct 03 09:04:05 crc kubenswrapper[4899]: I1003 09:04:05.914067 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-clsrv" Oct 03 09:04:06 crc kubenswrapper[4899]: I1003 09:04:06.405184 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-clsrv"] Oct 03 09:04:06 crc kubenswrapper[4899]: I1003 09:04:06.920057 4899 generic.go:334] "Generic (PLEG): container finished" podID="7156e6ac-fc7e-4e76-a177-285e1ef7fe51" containerID="0382df8525119724b121d28367b70f5d3f650cb5de665ac6d9118bc4844eba7d" exitCode=0 Oct 03 09:04:06 crc kubenswrapper[4899]: I1003 09:04:06.920162 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-clsrv" event={"ID":"7156e6ac-fc7e-4e76-a177-285e1ef7fe51","Type":"ContainerDied","Data":"0382df8525119724b121d28367b70f5d3f650cb5de665ac6d9118bc4844eba7d"} Oct 03 09:04:06 crc kubenswrapper[4899]: I1003 09:04:06.920318 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-clsrv" event={"ID":"7156e6ac-fc7e-4e76-a177-285e1ef7fe51","Type":"ContainerStarted","Data":"9a825abd7c8d5c7473cb2e14336cc077bc60320c567c9160d6e1647a11c8b866"} Oct 03 09:04:07 crc kubenswrapper[4899]: I1003 09:04:07.931039 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-clsrv" event={"ID":"7156e6ac-fc7e-4e76-a177-285e1ef7fe51","Type":"ContainerStarted","Data":"3627f358eb8d969800fb2c084f00fdc673da3fea2d1f5da5ade4da962b9206c5"} Oct 03 09:04:08 crc kubenswrapper[4899]: I1003 09:04:08.941566 4899 generic.go:334] "Generic (PLEG): container finished" podID="7156e6ac-fc7e-4e76-a177-285e1ef7fe51" containerID="3627f358eb8d969800fb2c084f00fdc673da3fea2d1f5da5ade4da962b9206c5" exitCode=0 Oct 03 09:04:08 crc kubenswrapper[4899]: I1003 09:04:08.942047 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-clsrv" event={"ID":"7156e6ac-fc7e-4e76-a177-285e1ef7fe51","Type":"ContainerDied","Data":"3627f358eb8d969800fb2c084f00fdc673da3fea2d1f5da5ade4da962b9206c5"} Oct 03 09:04:09 crc kubenswrapper[4899]: I1003 09:04:09.955540 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-clsrv" event={"ID":"7156e6ac-fc7e-4e76-a177-285e1ef7fe51","Type":"ContainerStarted","Data":"0d1d18aabfac2673d1011f225457df37eb4e93ae11f9941b82acb4aef6906929"} Oct 03 09:04:09 crc kubenswrapper[4899]: I1003 09:04:09.973820 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-clsrv" podStartSLOduration=2.549654371 podStartE2EDuration="4.973797707s" podCreationTimestamp="2025-10-03 09:04:05 +0000 UTC" firstStartedPulling="2025-10-03 09:04:06.921668594 +0000 UTC m=+1421.029153547" lastFinishedPulling="2025-10-03 09:04:09.34581193 +0000 UTC m=+1423.453296883" observedRunningTime="2025-10-03 09:04:09.969511762 +0000 UTC m=+1424.076996735" watchObservedRunningTime="2025-10-03 09:04:09.973797707 +0000 UTC m=+1424.081282660" Oct 03 09:04:12 crc kubenswrapper[4899]: I1003 09:04:12.969721 4899 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/redhat-marketplace-cf6gf"] Oct 03 09:04:12 crc kubenswrapper[4899]: I1003 09:04:12.972488 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cf6gf" Oct 03 09:04:12 crc kubenswrapper[4899]: I1003 09:04:12.987846 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cf6gf"] Oct 03 09:04:13 crc kubenswrapper[4899]: I1003 09:04:13.115775 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39bab9f9-23e6-4285-8f2d-ddb7bf0682f6-catalog-content\") pod \"redhat-marketplace-cf6gf\" (UID: \"39bab9f9-23e6-4285-8f2d-ddb7bf0682f6\") " pod="openshift-marketplace/redhat-marketplace-cf6gf" Oct 03 09:04:13 crc kubenswrapper[4899]: I1003 09:04:13.115932 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39bab9f9-23e6-4285-8f2d-ddb7bf0682f6-utilities\") pod \"redhat-marketplace-cf6gf\" (UID: \"39bab9f9-23e6-4285-8f2d-ddb7bf0682f6\") " pod="openshift-marketplace/redhat-marketplace-cf6gf" Oct 03 09:04:13 crc kubenswrapper[4899]: I1003 09:04:13.115983 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6blgx\" (UniqueName: \"kubernetes.io/projected/39bab9f9-23e6-4285-8f2d-ddb7bf0682f6-kube-api-access-6blgx\") pod \"redhat-marketplace-cf6gf\" (UID: \"39bab9f9-23e6-4285-8f2d-ddb7bf0682f6\") " pod="openshift-marketplace/redhat-marketplace-cf6gf" Oct 03 09:04:13 crc kubenswrapper[4899]: I1003 09:04:13.218117 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39bab9f9-23e6-4285-8f2d-ddb7bf0682f6-catalog-content\") pod \"redhat-marketplace-cf6gf\" (UID: \"39bab9f9-23e6-4285-8f2d-ddb7bf0682f6\") " pod="openshift-marketplace/redhat-marketplace-cf6gf" Oct 03 09:04:13 crc kubenswrapper[4899]: I1003 09:04:13.218302 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39bab9f9-23e6-4285-8f2d-ddb7bf0682f6-utilities\") pod \"redhat-marketplace-cf6gf\" (UID: \"39bab9f9-23e6-4285-8f2d-ddb7bf0682f6\") " pod="openshift-marketplace/redhat-marketplace-cf6gf" Oct 03 09:04:13 crc kubenswrapper[4899]: I1003 09:04:13.218363 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6blgx\" (UniqueName: \"kubernetes.io/projected/39bab9f9-23e6-4285-8f2d-ddb7bf0682f6-kube-api-access-6blgx\") pod \"redhat-marketplace-cf6gf\" (UID: \"39bab9f9-23e6-4285-8f2d-ddb7bf0682f6\") " pod="openshift-marketplace/redhat-marketplace-cf6gf" Oct 03 09:04:13 crc kubenswrapper[4899]: I1003 09:04:13.219030 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39bab9f9-23e6-4285-8f2d-ddb7bf0682f6-catalog-content\") pod \"redhat-marketplace-cf6gf\" (UID: \"39bab9f9-23e6-4285-8f2d-ddb7bf0682f6\") " pod="openshift-marketplace/redhat-marketplace-cf6gf" Oct 03 09:04:13 crc kubenswrapper[4899]: I1003 09:04:13.219240 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39bab9f9-23e6-4285-8f2d-ddb7bf0682f6-utilities\") pod \"redhat-marketplace-cf6gf\" (UID: \"39bab9f9-23e6-4285-8f2d-ddb7bf0682f6\") 
" pod="openshift-marketplace/redhat-marketplace-cf6gf" Oct 03 09:04:13 crc kubenswrapper[4899]: I1003 09:04:13.248284 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6blgx\" (UniqueName: \"kubernetes.io/projected/39bab9f9-23e6-4285-8f2d-ddb7bf0682f6-kube-api-access-6blgx\") pod \"redhat-marketplace-cf6gf\" (UID: \"39bab9f9-23e6-4285-8f2d-ddb7bf0682f6\") " pod="openshift-marketplace/redhat-marketplace-cf6gf" Oct 03 09:04:13 crc kubenswrapper[4899]: I1003 09:04:13.312806 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cf6gf" Oct 03 09:04:13 crc kubenswrapper[4899]: I1003 09:04:13.860160 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cf6gf"] Oct 03 09:04:13 crc kubenswrapper[4899]: I1003 09:04:13.997230 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cf6gf" event={"ID":"39bab9f9-23e6-4285-8f2d-ddb7bf0682f6","Type":"ContainerStarted","Data":"5b4d2b62a65e0bd7d1ed189192dad4979c82ae9e31cc5b68e2676eaea5518d83"} Oct 03 09:04:15 crc kubenswrapper[4899]: I1003 09:04:15.012206 4899 generic.go:334] "Generic (PLEG): container finished" podID="39bab9f9-23e6-4285-8f2d-ddb7bf0682f6" containerID="19592965d09adfde48a5e467773d501fa933baf1450fc06ea16c93299ca8c7d6" exitCode=0 Oct 03 09:04:15 crc kubenswrapper[4899]: I1003 09:04:15.012260 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cf6gf" event={"ID":"39bab9f9-23e6-4285-8f2d-ddb7bf0682f6","Type":"ContainerDied","Data":"19592965d09adfde48a5e467773d501fa933baf1450fc06ea16c93299ca8c7d6"} Oct 03 09:04:15 crc kubenswrapper[4899]: I1003 09:04:15.914556 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-clsrv" Oct 03 09:04:15 crc kubenswrapper[4899]: I1003 09:04:15.915026 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-clsrv" Oct 03 09:04:15 crc kubenswrapper[4899]: I1003 09:04:15.964381 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-clsrv" Oct 03 09:04:16 crc kubenswrapper[4899]: I1003 09:04:16.022098 4899 generic.go:334] "Generic (PLEG): container finished" podID="39bab9f9-23e6-4285-8f2d-ddb7bf0682f6" containerID="cf7dd38554a304f51e177152126fba5b1521ff2bac43b62e19c377141361ac9a" exitCode=0 Oct 03 09:04:16 crc kubenswrapper[4899]: I1003 09:04:16.022228 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cf6gf" event={"ID":"39bab9f9-23e6-4285-8f2d-ddb7bf0682f6","Type":"ContainerDied","Data":"cf7dd38554a304f51e177152126fba5b1521ff2bac43b62e19c377141361ac9a"} Oct 03 09:04:16 crc kubenswrapper[4899]: I1003 09:04:16.068123 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-clsrv" Oct 03 09:04:17 crc kubenswrapper[4899]: I1003 09:04:17.031807 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cf6gf" event={"ID":"39bab9f9-23e6-4285-8f2d-ddb7bf0682f6","Type":"ContainerStarted","Data":"cf8605686b24308618448e7ade86b4ef8a7dbe316e3568efca8cdfe44d5f546d"} Oct 03 09:04:17 crc kubenswrapper[4899]: I1003 09:04:17.053361 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-marketplace-cf6gf" podStartSLOduration=3.379822249 podStartE2EDuration="5.053343838s" podCreationTimestamp="2025-10-03 09:04:12 +0000 UTC" firstStartedPulling="2025-10-03 09:04:15.015399089 +0000 UTC m=+1429.122884032" lastFinishedPulling="2025-10-03 09:04:16.688920658 +0000 UTC m=+1430.796405621" observedRunningTime="2025-10-03 09:04:17.051219971 +0000 UTC m=+1431.158704924" watchObservedRunningTime="2025-10-03 09:04:17.053343838 +0000 UTC m=+1431.160828791" Oct 03 09:04:18 crc kubenswrapper[4899]: I1003 09:04:18.165828 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-clsrv"] Oct 03 09:04:18 crc kubenswrapper[4899]: I1003 09:04:18.166344 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-clsrv" podUID="7156e6ac-fc7e-4e76-a177-285e1ef7fe51" containerName="registry-server" containerID="cri-o://0d1d18aabfac2673d1011f225457df37eb4e93ae11f9941b82acb4aef6906929" gracePeriod=2 Oct 03 09:04:18 crc kubenswrapper[4899]: I1003 09:04:18.615643 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-clsrv" Oct 03 09:04:18 crc kubenswrapper[4899]: I1003 09:04:18.733826 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7156e6ac-fc7e-4e76-a177-285e1ef7fe51-utilities\") pod \"7156e6ac-fc7e-4e76-a177-285e1ef7fe51\" (UID: \"7156e6ac-fc7e-4e76-a177-285e1ef7fe51\") " Oct 03 09:04:18 crc kubenswrapper[4899]: I1003 09:04:18.733973 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7156e6ac-fc7e-4e76-a177-285e1ef7fe51-catalog-content\") pod \"7156e6ac-fc7e-4e76-a177-285e1ef7fe51\" (UID: \"7156e6ac-fc7e-4e76-a177-285e1ef7fe51\") " Oct 03 09:04:18 crc kubenswrapper[4899]: I1003 09:04:18.734095 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cddv4\" (UniqueName: \"kubernetes.io/projected/7156e6ac-fc7e-4e76-a177-285e1ef7fe51-kube-api-access-cddv4\") pod \"7156e6ac-fc7e-4e76-a177-285e1ef7fe51\" (UID: \"7156e6ac-fc7e-4e76-a177-285e1ef7fe51\") " Oct 03 09:04:18 crc kubenswrapper[4899]: I1003 09:04:18.735594 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7156e6ac-fc7e-4e76-a177-285e1ef7fe51-utilities" (OuterVolumeSpecName: "utilities") pod "7156e6ac-fc7e-4e76-a177-285e1ef7fe51" (UID: "7156e6ac-fc7e-4e76-a177-285e1ef7fe51"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:04:18 crc kubenswrapper[4899]: I1003 09:04:18.742397 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7156e6ac-fc7e-4e76-a177-285e1ef7fe51-kube-api-access-cddv4" (OuterVolumeSpecName: "kube-api-access-cddv4") pod "7156e6ac-fc7e-4e76-a177-285e1ef7fe51" (UID: "7156e6ac-fc7e-4e76-a177-285e1ef7fe51"). InnerVolumeSpecName "kube-api-access-cddv4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:04:18 crc kubenswrapper[4899]: I1003 09:04:18.781842 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7156e6ac-fc7e-4e76-a177-285e1ef7fe51-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7156e6ac-fc7e-4e76-a177-285e1ef7fe51" (UID: "7156e6ac-fc7e-4e76-a177-285e1ef7fe51"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:04:18 crc kubenswrapper[4899]: I1003 09:04:18.835966 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7156e6ac-fc7e-4e76-a177-285e1ef7fe51-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 09:04:18 crc kubenswrapper[4899]: I1003 09:04:18.836005 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7156e6ac-fc7e-4e76-a177-285e1ef7fe51-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 09:04:18 crc kubenswrapper[4899]: I1003 09:04:18.836020 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cddv4\" (UniqueName: \"kubernetes.io/projected/7156e6ac-fc7e-4e76-a177-285e1ef7fe51-kube-api-access-cddv4\") on node \"crc\" DevicePath \"\"" Oct 03 09:04:19 crc kubenswrapper[4899]: I1003 09:04:19.062510 4899 generic.go:334] "Generic (PLEG): container finished" podID="7156e6ac-fc7e-4e76-a177-285e1ef7fe51" containerID="0d1d18aabfac2673d1011f225457df37eb4e93ae11f9941b82acb4aef6906929" exitCode=0 Oct 03 09:04:19 crc kubenswrapper[4899]: I1003 09:04:19.062559 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-clsrv" event={"ID":"7156e6ac-fc7e-4e76-a177-285e1ef7fe51","Type":"ContainerDied","Data":"0d1d18aabfac2673d1011f225457df37eb4e93ae11f9941b82acb4aef6906929"} Oct 03 09:04:19 crc kubenswrapper[4899]: I1003 09:04:19.062585 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-clsrv" event={"ID":"7156e6ac-fc7e-4e76-a177-285e1ef7fe51","Type":"ContainerDied","Data":"9a825abd7c8d5c7473cb2e14336cc077bc60320c567c9160d6e1647a11c8b866"} Oct 03 09:04:19 crc kubenswrapper[4899]: I1003 09:04:19.062601 4899 scope.go:117] "RemoveContainer" containerID="0d1d18aabfac2673d1011f225457df37eb4e93ae11f9941b82acb4aef6906929" Oct 03 09:04:19 crc kubenswrapper[4899]: I1003 09:04:19.062618 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-clsrv" Oct 03 09:04:19 crc kubenswrapper[4899]: I1003 09:04:19.086833 4899 scope.go:117] "RemoveContainer" containerID="3627f358eb8d969800fb2c084f00fdc673da3fea2d1f5da5ade4da962b9206c5" Oct 03 09:04:19 crc kubenswrapper[4899]: I1003 09:04:19.098024 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-clsrv"] Oct 03 09:04:19 crc kubenswrapper[4899]: I1003 09:04:19.111253 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-clsrv"] Oct 03 09:04:19 crc kubenswrapper[4899]: I1003 09:04:19.115817 4899 scope.go:117] "RemoveContainer" containerID="0382df8525119724b121d28367b70f5d3f650cb5de665ac6d9118bc4844eba7d" Oct 03 09:04:19 crc kubenswrapper[4899]: I1003 09:04:19.150145 4899 scope.go:117] "RemoveContainer" containerID="0d1d18aabfac2673d1011f225457df37eb4e93ae11f9941b82acb4aef6906929" Oct 03 09:04:19 crc kubenswrapper[4899]: E1003 09:04:19.150798 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d1d18aabfac2673d1011f225457df37eb4e93ae11f9941b82acb4aef6906929\": container with ID starting with 0d1d18aabfac2673d1011f225457df37eb4e93ae11f9941b82acb4aef6906929 not found: ID does not exist" containerID="0d1d18aabfac2673d1011f225457df37eb4e93ae11f9941b82acb4aef6906929" Oct 03 09:04:19 crc kubenswrapper[4899]: I1003 09:04:19.150863 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d1d18aabfac2673d1011f225457df37eb4e93ae11f9941b82acb4aef6906929"} err="failed to get container status \"0d1d18aabfac2673d1011f225457df37eb4e93ae11f9941b82acb4aef6906929\": rpc error: code = NotFound desc = could not find container \"0d1d18aabfac2673d1011f225457df37eb4e93ae11f9941b82acb4aef6906929\": container with ID starting with 0d1d18aabfac2673d1011f225457df37eb4e93ae11f9941b82acb4aef6906929 not found: ID does not exist" Oct 03 09:04:19 crc kubenswrapper[4899]: I1003 09:04:19.150908 4899 scope.go:117] "RemoveContainer" containerID="3627f358eb8d969800fb2c084f00fdc673da3fea2d1f5da5ade4da962b9206c5" Oct 03 09:04:19 crc kubenswrapper[4899]: E1003 09:04:19.151610 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3627f358eb8d969800fb2c084f00fdc673da3fea2d1f5da5ade4da962b9206c5\": container with ID starting with 3627f358eb8d969800fb2c084f00fdc673da3fea2d1f5da5ade4da962b9206c5 not found: ID does not exist" containerID="3627f358eb8d969800fb2c084f00fdc673da3fea2d1f5da5ade4da962b9206c5" Oct 03 09:04:19 crc kubenswrapper[4899]: I1003 09:04:19.151673 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3627f358eb8d969800fb2c084f00fdc673da3fea2d1f5da5ade4da962b9206c5"} err="failed to get container status \"3627f358eb8d969800fb2c084f00fdc673da3fea2d1f5da5ade4da962b9206c5\": rpc error: code = NotFound desc = could not find container \"3627f358eb8d969800fb2c084f00fdc673da3fea2d1f5da5ade4da962b9206c5\": container with ID starting with 3627f358eb8d969800fb2c084f00fdc673da3fea2d1f5da5ade4da962b9206c5 not found: ID does not exist" Oct 03 09:04:19 crc kubenswrapper[4899]: I1003 09:04:19.151685 4899 scope.go:117] "RemoveContainer" containerID="0382df8525119724b121d28367b70f5d3f650cb5de665ac6d9118bc4844eba7d" Oct 03 09:04:19 crc kubenswrapper[4899]: E1003 09:04:19.152162 4899 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"0382df8525119724b121d28367b70f5d3f650cb5de665ac6d9118bc4844eba7d\": container with ID starting with 0382df8525119724b121d28367b70f5d3f650cb5de665ac6d9118bc4844eba7d not found: ID does not exist" containerID="0382df8525119724b121d28367b70f5d3f650cb5de665ac6d9118bc4844eba7d" Oct 03 09:04:19 crc kubenswrapper[4899]: I1003 09:04:19.152209 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0382df8525119724b121d28367b70f5d3f650cb5de665ac6d9118bc4844eba7d"} err="failed to get container status \"0382df8525119724b121d28367b70f5d3f650cb5de665ac6d9118bc4844eba7d\": rpc error: code = NotFound desc = could not find container \"0382df8525119724b121d28367b70f5d3f650cb5de665ac6d9118bc4844eba7d\": container with ID starting with 0382df8525119724b121d28367b70f5d3f650cb5de665ac6d9118bc4844eba7d not found: ID does not exist" Oct 03 09:04:20 crc kubenswrapper[4899]: I1003 09:04:20.538386 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7156e6ac-fc7e-4e76-a177-285e1ef7fe51" path="/var/lib/kubelet/pods/7156e6ac-fc7e-4e76-a177-285e1ef7fe51/volumes" Oct 03 09:04:23 crc kubenswrapper[4899]: I1003 09:04:23.312937 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-cf6gf" Oct 03 09:04:23 crc kubenswrapper[4899]: I1003 09:04:23.313281 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-cf6gf" Oct 03 09:04:23 crc kubenswrapper[4899]: I1003 09:04:23.357483 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-cf6gf" Oct 03 09:04:24 crc kubenswrapper[4899]: I1003 09:04:24.153235 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-cf6gf" Oct 03 09:04:24 crc kubenswrapper[4899]: I1003 09:04:24.199016 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cf6gf"] Oct 03 09:04:26 crc kubenswrapper[4899]: I1003 09:04:26.130033 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-cf6gf" podUID="39bab9f9-23e6-4285-8f2d-ddb7bf0682f6" containerName="registry-server" containerID="cri-o://cf8605686b24308618448e7ade86b4ef8a7dbe316e3568efca8cdfe44d5f546d" gracePeriod=2 Oct 03 09:04:26 crc kubenswrapper[4899]: I1003 09:04:26.592876 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cf6gf" Oct 03 09:04:26 crc kubenswrapper[4899]: I1003 09:04:26.702780 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6blgx\" (UniqueName: \"kubernetes.io/projected/39bab9f9-23e6-4285-8f2d-ddb7bf0682f6-kube-api-access-6blgx\") pod \"39bab9f9-23e6-4285-8f2d-ddb7bf0682f6\" (UID: \"39bab9f9-23e6-4285-8f2d-ddb7bf0682f6\") " Oct 03 09:04:26 crc kubenswrapper[4899]: I1003 09:04:26.714354 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39bab9f9-23e6-4285-8f2d-ddb7bf0682f6-utilities\") pod \"39bab9f9-23e6-4285-8f2d-ddb7bf0682f6\" (UID: \"39bab9f9-23e6-4285-8f2d-ddb7bf0682f6\") " Oct 03 09:04:26 crc kubenswrapper[4899]: I1003 09:04:26.714526 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39bab9f9-23e6-4285-8f2d-ddb7bf0682f6-catalog-content\") pod \"39bab9f9-23e6-4285-8f2d-ddb7bf0682f6\" (UID: \"39bab9f9-23e6-4285-8f2d-ddb7bf0682f6\") " Oct 03 09:04:26 crc kubenswrapper[4899]: I1003 09:04:26.716719 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39bab9f9-23e6-4285-8f2d-ddb7bf0682f6-utilities" (OuterVolumeSpecName: "utilities") pod "39bab9f9-23e6-4285-8f2d-ddb7bf0682f6" (UID: "39bab9f9-23e6-4285-8f2d-ddb7bf0682f6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:04:26 crc kubenswrapper[4899]: I1003 09:04:26.727377 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39bab9f9-23e6-4285-8f2d-ddb7bf0682f6-kube-api-access-6blgx" (OuterVolumeSpecName: "kube-api-access-6blgx") pod "39bab9f9-23e6-4285-8f2d-ddb7bf0682f6" (UID: "39bab9f9-23e6-4285-8f2d-ddb7bf0682f6"). InnerVolumeSpecName "kube-api-access-6blgx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:04:26 crc kubenswrapper[4899]: I1003 09:04:26.742710 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39bab9f9-23e6-4285-8f2d-ddb7bf0682f6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "39bab9f9-23e6-4285-8f2d-ddb7bf0682f6" (UID: "39bab9f9-23e6-4285-8f2d-ddb7bf0682f6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:04:26 crc kubenswrapper[4899]: I1003 09:04:26.816606 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6blgx\" (UniqueName: \"kubernetes.io/projected/39bab9f9-23e6-4285-8f2d-ddb7bf0682f6-kube-api-access-6blgx\") on node \"crc\" DevicePath \"\"" Oct 03 09:04:26 crc kubenswrapper[4899]: I1003 09:04:26.816640 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39bab9f9-23e6-4285-8f2d-ddb7bf0682f6-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 09:04:26 crc kubenswrapper[4899]: I1003 09:04:26.816650 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39bab9f9-23e6-4285-8f2d-ddb7bf0682f6-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 09:04:27 crc kubenswrapper[4899]: I1003 09:04:27.142227 4899 generic.go:334] "Generic (PLEG): container finished" podID="39bab9f9-23e6-4285-8f2d-ddb7bf0682f6" containerID="cf8605686b24308618448e7ade86b4ef8a7dbe316e3568efca8cdfe44d5f546d" exitCode=0 Oct 03 09:04:27 crc kubenswrapper[4899]: I1003 09:04:27.142318 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cf6gf" Oct 03 09:04:27 crc kubenswrapper[4899]: I1003 09:04:27.142307 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cf6gf" event={"ID":"39bab9f9-23e6-4285-8f2d-ddb7bf0682f6","Type":"ContainerDied","Data":"cf8605686b24308618448e7ade86b4ef8a7dbe316e3568efca8cdfe44d5f546d"} Oct 03 09:04:27 crc kubenswrapper[4899]: I1003 09:04:27.142425 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cf6gf" event={"ID":"39bab9f9-23e6-4285-8f2d-ddb7bf0682f6","Type":"ContainerDied","Data":"5b4d2b62a65e0bd7d1ed189192dad4979c82ae9e31cc5b68e2676eaea5518d83"} Oct 03 09:04:27 crc kubenswrapper[4899]: I1003 09:04:27.142450 4899 scope.go:117] "RemoveContainer" containerID="cf8605686b24308618448e7ade86b4ef8a7dbe316e3568efca8cdfe44d5f546d" Oct 03 09:04:27 crc kubenswrapper[4899]: I1003 09:04:27.168945 4899 scope.go:117] "RemoveContainer" containerID="cf7dd38554a304f51e177152126fba5b1521ff2bac43b62e19c377141361ac9a" Oct 03 09:04:27 crc kubenswrapper[4899]: I1003 09:04:27.186540 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cf6gf"] Oct 03 09:04:27 crc kubenswrapper[4899]: I1003 09:04:27.208181 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-cf6gf"] Oct 03 09:04:27 crc kubenswrapper[4899]: I1003 09:04:27.221473 4899 scope.go:117] "RemoveContainer" containerID="19592965d09adfde48a5e467773d501fa933baf1450fc06ea16c93299ca8c7d6" Oct 03 09:04:27 crc kubenswrapper[4899]: I1003 09:04:27.242592 4899 scope.go:117] "RemoveContainer" containerID="cf8605686b24308618448e7ade86b4ef8a7dbe316e3568efca8cdfe44d5f546d" Oct 03 09:04:27 crc kubenswrapper[4899]: E1003 09:04:27.242833 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf8605686b24308618448e7ade86b4ef8a7dbe316e3568efca8cdfe44d5f546d\": container with ID starting with cf8605686b24308618448e7ade86b4ef8a7dbe316e3568efca8cdfe44d5f546d not found: ID does not exist" containerID="cf8605686b24308618448e7ade86b4ef8a7dbe316e3568efca8cdfe44d5f546d" Oct 03 09:04:27 crc kubenswrapper[4899]: I1003 09:04:27.242860 4899 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf8605686b24308618448e7ade86b4ef8a7dbe316e3568efca8cdfe44d5f546d"} err="failed to get container status \"cf8605686b24308618448e7ade86b4ef8a7dbe316e3568efca8cdfe44d5f546d\": rpc error: code = NotFound desc = could not find container \"cf8605686b24308618448e7ade86b4ef8a7dbe316e3568efca8cdfe44d5f546d\": container with ID starting with cf8605686b24308618448e7ade86b4ef8a7dbe316e3568efca8cdfe44d5f546d not found: ID does not exist" Oct 03 09:04:27 crc kubenswrapper[4899]: I1003 09:04:27.242906 4899 scope.go:117] "RemoveContainer" containerID="cf7dd38554a304f51e177152126fba5b1521ff2bac43b62e19c377141361ac9a" Oct 03 09:04:27 crc kubenswrapper[4899]: E1003 09:04:27.243314 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf7dd38554a304f51e177152126fba5b1521ff2bac43b62e19c377141361ac9a\": container with ID starting with cf7dd38554a304f51e177152126fba5b1521ff2bac43b62e19c377141361ac9a not found: ID does not exist" containerID="cf7dd38554a304f51e177152126fba5b1521ff2bac43b62e19c377141361ac9a" Oct 03 09:04:27 crc kubenswrapper[4899]: I1003 09:04:27.243360 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf7dd38554a304f51e177152126fba5b1521ff2bac43b62e19c377141361ac9a"} err="failed to get container status \"cf7dd38554a304f51e177152126fba5b1521ff2bac43b62e19c377141361ac9a\": rpc error: code = NotFound desc = could not find container \"cf7dd38554a304f51e177152126fba5b1521ff2bac43b62e19c377141361ac9a\": container with ID starting with cf7dd38554a304f51e177152126fba5b1521ff2bac43b62e19c377141361ac9a not found: ID does not exist" Oct 03 09:04:27 crc kubenswrapper[4899]: I1003 09:04:27.243373 4899 scope.go:117] "RemoveContainer" containerID="19592965d09adfde48a5e467773d501fa933baf1450fc06ea16c93299ca8c7d6" Oct 03 09:04:27 crc kubenswrapper[4899]: E1003 09:04:27.243624 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19592965d09adfde48a5e467773d501fa933baf1450fc06ea16c93299ca8c7d6\": container with ID starting with 19592965d09adfde48a5e467773d501fa933baf1450fc06ea16c93299ca8c7d6 not found: ID does not exist" containerID="19592965d09adfde48a5e467773d501fa933baf1450fc06ea16c93299ca8c7d6" Oct 03 09:04:27 crc kubenswrapper[4899]: I1003 09:04:27.243734 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19592965d09adfde48a5e467773d501fa933baf1450fc06ea16c93299ca8c7d6"} err="failed to get container status \"19592965d09adfde48a5e467773d501fa933baf1450fc06ea16c93299ca8c7d6\": rpc error: code = NotFound desc = could not find container \"19592965d09adfde48a5e467773d501fa933baf1450fc06ea16c93299ca8c7d6\": container with ID starting with 19592965d09adfde48a5e467773d501fa933baf1450fc06ea16c93299ca8c7d6 not found: ID does not exist" Oct 03 09:04:28 crc kubenswrapper[4899]: I1003 09:04:28.537969 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39bab9f9-23e6-4285-8f2d-ddb7bf0682f6" path="/var/lib/kubelet/pods/39bab9f9-23e6-4285-8f2d-ddb7bf0682f6/volumes" Oct 03 09:04:31 crc kubenswrapper[4899]: I1003 09:04:31.056987 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bgmpm"] Oct 03 09:04:31 crc kubenswrapper[4899]: E1003 09:04:31.057757 4899 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="7156e6ac-fc7e-4e76-a177-285e1ef7fe51" containerName="extract-utilities" Oct 03 09:04:31 crc kubenswrapper[4899]: I1003 09:04:31.057776 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="7156e6ac-fc7e-4e76-a177-285e1ef7fe51" containerName="extract-utilities" Oct 03 09:04:31 crc kubenswrapper[4899]: E1003 09:04:31.057791 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39bab9f9-23e6-4285-8f2d-ddb7bf0682f6" containerName="registry-server" Oct 03 09:04:31 crc kubenswrapper[4899]: I1003 09:04:31.057799 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="39bab9f9-23e6-4285-8f2d-ddb7bf0682f6" containerName="registry-server" Oct 03 09:04:31 crc kubenswrapper[4899]: E1003 09:04:31.057814 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39bab9f9-23e6-4285-8f2d-ddb7bf0682f6" containerName="extract-utilities" Oct 03 09:04:31 crc kubenswrapper[4899]: I1003 09:04:31.057823 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="39bab9f9-23e6-4285-8f2d-ddb7bf0682f6" containerName="extract-utilities" Oct 03 09:04:31 crc kubenswrapper[4899]: E1003 09:04:31.057833 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7156e6ac-fc7e-4e76-a177-285e1ef7fe51" containerName="registry-server" Oct 03 09:04:31 crc kubenswrapper[4899]: I1003 09:04:31.057840 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="7156e6ac-fc7e-4e76-a177-285e1ef7fe51" containerName="registry-server" Oct 03 09:04:31 crc kubenswrapper[4899]: E1003 09:04:31.057856 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7156e6ac-fc7e-4e76-a177-285e1ef7fe51" containerName="extract-content" Oct 03 09:04:31 crc kubenswrapper[4899]: I1003 09:04:31.057865 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="7156e6ac-fc7e-4e76-a177-285e1ef7fe51" containerName="extract-content" Oct 03 09:04:31 crc kubenswrapper[4899]: E1003 09:04:31.057910 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39bab9f9-23e6-4285-8f2d-ddb7bf0682f6" containerName="extract-content" Oct 03 09:04:31 crc kubenswrapper[4899]: I1003 09:04:31.057919 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="39bab9f9-23e6-4285-8f2d-ddb7bf0682f6" containerName="extract-content" Oct 03 09:04:31 crc kubenswrapper[4899]: I1003 09:04:31.058139 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="39bab9f9-23e6-4285-8f2d-ddb7bf0682f6" containerName="registry-server" Oct 03 09:04:31 crc kubenswrapper[4899]: I1003 09:04:31.058152 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="7156e6ac-fc7e-4e76-a177-285e1ef7fe51" containerName="registry-server" Oct 03 09:04:31 crc kubenswrapper[4899]: I1003 09:04:31.061175 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bgmpm" Oct 03 09:04:31 crc kubenswrapper[4899]: I1003 09:04:31.109998 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bgmpm"] Oct 03 09:04:31 crc kubenswrapper[4899]: I1003 09:04:31.195723 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xsmmb\" (UniqueName: \"kubernetes.io/projected/c117d51d-dc35-418c-97bd-3eafdb30385e-kube-api-access-xsmmb\") pod \"community-operators-bgmpm\" (UID: \"c117d51d-dc35-418c-97bd-3eafdb30385e\") " pod="openshift-marketplace/community-operators-bgmpm" Oct 03 09:04:31 crc kubenswrapper[4899]: I1003 09:04:31.195925 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c117d51d-dc35-418c-97bd-3eafdb30385e-catalog-content\") pod \"community-operators-bgmpm\" (UID: \"c117d51d-dc35-418c-97bd-3eafdb30385e\") " pod="openshift-marketplace/community-operators-bgmpm" Oct 03 09:04:31 crc kubenswrapper[4899]: I1003 09:04:31.196002 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c117d51d-dc35-418c-97bd-3eafdb30385e-utilities\") pod \"community-operators-bgmpm\" (UID: \"c117d51d-dc35-418c-97bd-3eafdb30385e\") " pod="openshift-marketplace/community-operators-bgmpm" Oct 03 09:04:31 crc kubenswrapper[4899]: I1003 09:04:31.297291 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c117d51d-dc35-418c-97bd-3eafdb30385e-catalog-content\") pod \"community-operators-bgmpm\" (UID: \"c117d51d-dc35-418c-97bd-3eafdb30385e\") " pod="openshift-marketplace/community-operators-bgmpm" Oct 03 09:04:31 crc kubenswrapper[4899]: I1003 09:04:31.297337 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c117d51d-dc35-418c-97bd-3eafdb30385e-utilities\") pod \"community-operators-bgmpm\" (UID: \"c117d51d-dc35-418c-97bd-3eafdb30385e\") " pod="openshift-marketplace/community-operators-bgmpm" Oct 03 09:04:31 crc kubenswrapper[4899]: I1003 09:04:31.297416 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xsmmb\" (UniqueName: \"kubernetes.io/projected/c117d51d-dc35-418c-97bd-3eafdb30385e-kube-api-access-xsmmb\") pod \"community-operators-bgmpm\" (UID: \"c117d51d-dc35-418c-97bd-3eafdb30385e\") " pod="openshift-marketplace/community-operators-bgmpm" Oct 03 09:04:31 crc kubenswrapper[4899]: I1003 09:04:31.298455 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c117d51d-dc35-418c-97bd-3eafdb30385e-catalog-content\") pod \"community-operators-bgmpm\" (UID: \"c117d51d-dc35-418c-97bd-3eafdb30385e\") " pod="openshift-marketplace/community-operators-bgmpm" Oct 03 09:04:31 crc kubenswrapper[4899]: I1003 09:04:31.298719 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c117d51d-dc35-418c-97bd-3eafdb30385e-utilities\") pod \"community-operators-bgmpm\" (UID: \"c117d51d-dc35-418c-97bd-3eafdb30385e\") " pod="openshift-marketplace/community-operators-bgmpm" Oct 03 09:04:31 crc kubenswrapper[4899]: I1003 09:04:31.320816 4899 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-xsmmb\" (UniqueName: \"kubernetes.io/projected/c117d51d-dc35-418c-97bd-3eafdb30385e-kube-api-access-xsmmb\") pod \"community-operators-bgmpm\" (UID: \"c117d51d-dc35-418c-97bd-3eafdb30385e\") " pod="openshift-marketplace/community-operators-bgmpm" Oct 03 09:04:31 crc kubenswrapper[4899]: I1003 09:04:31.403765 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bgmpm" Oct 03 09:04:31 crc kubenswrapper[4899]: I1003 09:04:31.997481 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bgmpm"] Oct 03 09:04:31 crc kubenswrapper[4899]: W1003 09:04:31.999552 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc117d51d_dc35_418c_97bd_3eafdb30385e.slice/crio-3999d9d519ae02dd8f3bbcdb6fdf1c109552bea16cf8adcc1b91daf3f8f5ca3d WatchSource:0}: Error finding container 3999d9d519ae02dd8f3bbcdb6fdf1c109552bea16cf8adcc1b91daf3f8f5ca3d: Status 404 returned error can't find the container with id 3999d9d519ae02dd8f3bbcdb6fdf1c109552bea16cf8adcc1b91daf3f8f5ca3d Oct 03 09:04:32 crc kubenswrapper[4899]: I1003 09:04:32.187583 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bgmpm" event={"ID":"c117d51d-dc35-418c-97bd-3eafdb30385e","Type":"ContainerStarted","Data":"3999d9d519ae02dd8f3bbcdb6fdf1c109552bea16cf8adcc1b91daf3f8f5ca3d"} Oct 03 09:04:33 crc kubenswrapper[4899]: I1003 09:04:33.198906 4899 generic.go:334] "Generic (PLEG): container finished" podID="c117d51d-dc35-418c-97bd-3eafdb30385e" containerID="d89419d8e3758485b3aa9d36e1a4d6cba0c21a90676aade5914d17e08a26c891" exitCode=0 Oct 03 09:04:33 crc kubenswrapper[4899]: I1003 09:04:33.198948 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bgmpm" event={"ID":"c117d51d-dc35-418c-97bd-3eafdb30385e","Type":"ContainerDied","Data":"d89419d8e3758485b3aa9d36e1a4d6cba0c21a90676aade5914d17e08a26c891"} Oct 03 09:04:34 crc kubenswrapper[4899]: I1003 09:04:34.210719 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bgmpm" event={"ID":"c117d51d-dc35-418c-97bd-3eafdb30385e","Type":"ContainerStarted","Data":"82e74537325b36513a131ce49245ef70e8d9bfac00f258b936b8b6ecb60c45f6"} Oct 03 09:04:35 crc kubenswrapper[4899]: I1003 09:04:35.223400 4899 generic.go:334] "Generic (PLEG): container finished" podID="c117d51d-dc35-418c-97bd-3eafdb30385e" containerID="82e74537325b36513a131ce49245ef70e8d9bfac00f258b936b8b6ecb60c45f6" exitCode=0 Oct 03 09:04:35 crc kubenswrapper[4899]: I1003 09:04:35.223501 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bgmpm" event={"ID":"c117d51d-dc35-418c-97bd-3eafdb30385e","Type":"ContainerDied","Data":"82e74537325b36513a131ce49245ef70e8d9bfac00f258b936b8b6ecb60c45f6"} Oct 03 09:04:36 crc kubenswrapper[4899]: I1003 09:04:36.258033 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bgmpm" event={"ID":"c117d51d-dc35-418c-97bd-3eafdb30385e","Type":"ContainerStarted","Data":"e525f745f6d1101decc60300e3fd2d5fe264a364a274ded61581a753361e55aa"} Oct 03 09:04:36 crc kubenswrapper[4899]: I1003 09:04:36.281325 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bgmpm" 
podStartSLOduration=2.8630185 podStartE2EDuration="5.281303701s" podCreationTimestamp="2025-10-03 09:04:31 +0000 UTC" firstStartedPulling="2025-10-03 09:04:33.201235357 +0000 UTC m=+1447.308720300" lastFinishedPulling="2025-10-03 09:04:35.619520548 +0000 UTC m=+1449.727005501" observedRunningTime="2025-10-03 09:04:36.273965549 +0000 UTC m=+1450.381450502" watchObservedRunningTime="2025-10-03 09:04:36.281303701 +0000 UTC m=+1450.388788654" Oct 03 09:04:37 crc kubenswrapper[4899]: I1003 09:04:37.269451 4899 generic.go:334] "Generic (PLEG): container finished" podID="9ad20d0d-637a-49d9-8a83-bfae0d7c2a37" containerID="fe8201fbb8672ca9e1221ff63409aeb74c6c0a28ecc101fd061b5ff5f82c8620" exitCode=0 Oct 03 09:04:37 crc kubenswrapper[4899]: I1003 09:04:37.269520 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb" event={"ID":"9ad20d0d-637a-49d9-8a83-bfae0d7c2a37","Type":"ContainerDied","Data":"fe8201fbb8672ca9e1221ff63409aeb74c6c0a28ecc101fd061b5ff5f82c8620"} Oct 03 09:04:38 crc kubenswrapper[4899]: I1003 09:04:38.698315 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb" Oct 03 09:04:38 crc kubenswrapper[4899]: I1003 09:04:38.840248 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89rr9\" (UniqueName: \"kubernetes.io/projected/9ad20d0d-637a-49d9-8a83-bfae0d7c2a37-kube-api-access-89rr9\") pod \"9ad20d0d-637a-49d9-8a83-bfae0d7c2a37\" (UID: \"9ad20d0d-637a-49d9-8a83-bfae0d7c2a37\") " Oct 03 09:04:38 crc kubenswrapper[4899]: I1003 09:04:38.840359 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9ad20d0d-637a-49d9-8a83-bfae0d7c2a37-ssh-key\") pod \"9ad20d0d-637a-49d9-8a83-bfae0d7c2a37\" (UID: \"9ad20d0d-637a-49d9-8a83-bfae0d7c2a37\") " Oct 03 09:04:38 crc kubenswrapper[4899]: I1003 09:04:38.840410 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9ad20d0d-637a-49d9-8a83-bfae0d7c2a37-inventory\") pod \"9ad20d0d-637a-49d9-8a83-bfae0d7c2a37\" (UID: \"9ad20d0d-637a-49d9-8a83-bfae0d7c2a37\") " Oct 03 09:04:38 crc kubenswrapper[4899]: I1003 09:04:38.840658 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ad20d0d-637a-49d9-8a83-bfae0d7c2a37-bootstrap-combined-ca-bundle\") pod \"9ad20d0d-637a-49d9-8a83-bfae0d7c2a37\" (UID: \"9ad20d0d-637a-49d9-8a83-bfae0d7c2a37\") " Oct 03 09:04:38 crc kubenswrapper[4899]: I1003 09:04:38.846481 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ad20d0d-637a-49d9-8a83-bfae0d7c2a37-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "9ad20d0d-637a-49d9-8a83-bfae0d7c2a37" (UID: "9ad20d0d-637a-49d9-8a83-bfae0d7c2a37"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:04:38 crc kubenswrapper[4899]: I1003 09:04:38.847364 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ad20d0d-637a-49d9-8a83-bfae0d7c2a37-kube-api-access-89rr9" (OuterVolumeSpecName: "kube-api-access-89rr9") pod "9ad20d0d-637a-49d9-8a83-bfae0d7c2a37" (UID: "9ad20d0d-637a-49d9-8a83-bfae0d7c2a37"). 
InnerVolumeSpecName "kube-api-access-89rr9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:04:38 crc kubenswrapper[4899]: I1003 09:04:38.869715 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ad20d0d-637a-49d9-8a83-bfae0d7c2a37-inventory" (OuterVolumeSpecName: "inventory") pod "9ad20d0d-637a-49d9-8a83-bfae0d7c2a37" (UID: "9ad20d0d-637a-49d9-8a83-bfae0d7c2a37"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:04:38 crc kubenswrapper[4899]: I1003 09:04:38.870280 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ad20d0d-637a-49d9-8a83-bfae0d7c2a37-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9ad20d0d-637a-49d9-8a83-bfae0d7c2a37" (UID: "9ad20d0d-637a-49d9-8a83-bfae0d7c2a37"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:04:38 crc kubenswrapper[4899]: I1003 09:04:38.942764 4899 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ad20d0d-637a-49d9-8a83-bfae0d7c2a37-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 09:04:38 crc kubenswrapper[4899]: I1003 09:04:38.942814 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89rr9\" (UniqueName: \"kubernetes.io/projected/9ad20d0d-637a-49d9-8a83-bfae0d7c2a37-kube-api-access-89rr9\") on node \"crc\" DevicePath \"\"" Oct 03 09:04:38 crc kubenswrapper[4899]: I1003 09:04:38.942827 4899 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9ad20d0d-637a-49d9-8a83-bfae0d7c2a37-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 09:04:38 crc kubenswrapper[4899]: I1003 09:04:38.942838 4899 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9ad20d0d-637a-49d9-8a83-bfae0d7c2a37-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 09:04:39 crc kubenswrapper[4899]: I1003 09:04:39.289734 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb" event={"ID":"9ad20d0d-637a-49d9-8a83-bfae0d7c2a37","Type":"ContainerDied","Data":"4105f3b4a54c08517b8e5d4fa08d7e7a6525ae912f824967bf45c92e4f5ab148"} Oct 03 09:04:39 crc kubenswrapper[4899]: I1003 09:04:39.290077 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4105f3b4a54c08517b8e5d4fa08d7e7a6525ae912f824967bf45c92e4f5ab148" Oct 03 09:04:39 crc kubenswrapper[4899]: I1003 09:04:39.290199 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb" Oct 03 09:04:39 crc kubenswrapper[4899]: I1003 09:04:39.408222 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tskjn"] Oct 03 09:04:39 crc kubenswrapper[4899]: E1003 09:04:39.409199 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ad20d0d-637a-49d9-8a83-bfae0d7c2a37" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 03 09:04:39 crc kubenswrapper[4899]: I1003 09:04:39.409250 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ad20d0d-637a-49d9-8a83-bfae0d7c2a37" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 03 09:04:39 crc kubenswrapper[4899]: I1003 09:04:39.409592 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ad20d0d-637a-49d9-8a83-bfae0d7c2a37" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 03 09:04:39 crc kubenswrapper[4899]: I1003 09:04:39.411486 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tskjn" Oct 03 09:04:39 crc kubenswrapper[4899]: I1003 09:04:39.414127 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 09:04:39 crc kubenswrapper[4899]: I1003 09:04:39.414433 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 09:04:39 crc kubenswrapper[4899]: I1003 09:04:39.415878 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 09:04:39 crc kubenswrapper[4899]: I1003 09:04:39.417190 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pnmjv" Oct 03 09:04:39 crc kubenswrapper[4899]: I1003 09:04:39.424411 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tskjn"] Oct 03 09:04:39 crc kubenswrapper[4899]: I1003 09:04:39.554925 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/521016b7-078a-42dd-bec6-739da052031b-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tskjn\" (UID: \"521016b7-078a-42dd-bec6-739da052031b\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tskjn" Oct 03 09:04:39 crc kubenswrapper[4899]: I1003 09:04:39.555633 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/521016b7-078a-42dd-bec6-739da052031b-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tskjn\" (UID: \"521016b7-078a-42dd-bec6-739da052031b\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tskjn" Oct 03 09:04:39 crc kubenswrapper[4899]: I1003 09:04:39.555838 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqpr5\" (UniqueName: \"kubernetes.io/projected/521016b7-078a-42dd-bec6-739da052031b-kube-api-access-zqpr5\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tskjn\" (UID: \"521016b7-078a-42dd-bec6-739da052031b\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tskjn" Oct 03 09:04:39 crc kubenswrapper[4899]: I1003 09:04:39.656690 4899 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqpr5\" (UniqueName: \"kubernetes.io/projected/521016b7-078a-42dd-bec6-739da052031b-kube-api-access-zqpr5\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tskjn\" (UID: \"521016b7-078a-42dd-bec6-739da052031b\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tskjn" Oct 03 09:04:39 crc kubenswrapper[4899]: I1003 09:04:39.656762 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/521016b7-078a-42dd-bec6-739da052031b-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tskjn\" (UID: \"521016b7-078a-42dd-bec6-739da052031b\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tskjn" Oct 03 09:04:39 crc kubenswrapper[4899]: I1003 09:04:39.656830 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/521016b7-078a-42dd-bec6-739da052031b-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tskjn\" (UID: \"521016b7-078a-42dd-bec6-739da052031b\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tskjn" Oct 03 09:04:39 crc kubenswrapper[4899]: I1003 09:04:39.660632 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/521016b7-078a-42dd-bec6-739da052031b-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tskjn\" (UID: \"521016b7-078a-42dd-bec6-739da052031b\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tskjn" Oct 03 09:04:39 crc kubenswrapper[4899]: I1003 09:04:39.662090 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/521016b7-078a-42dd-bec6-739da052031b-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tskjn\" (UID: \"521016b7-078a-42dd-bec6-739da052031b\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tskjn" Oct 03 09:04:39 crc kubenswrapper[4899]: I1003 09:04:39.675728 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqpr5\" (UniqueName: \"kubernetes.io/projected/521016b7-078a-42dd-bec6-739da052031b-kube-api-access-zqpr5\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tskjn\" (UID: \"521016b7-078a-42dd-bec6-739da052031b\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tskjn" Oct 03 09:04:39 crc kubenswrapper[4899]: I1003 09:04:39.731883 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tskjn" Oct 03 09:04:40 crc kubenswrapper[4899]: W1003 09:04:40.222288 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod521016b7_078a_42dd_bec6_739da052031b.slice/crio-12e765c17e0a679ffb07fbc4135a534337394bf3c7ce81ba2a6c4aead913752e WatchSource:0}: Error finding container 12e765c17e0a679ffb07fbc4135a534337394bf3c7ce81ba2a6c4aead913752e: Status 404 returned error can't find the container with id 12e765c17e0a679ffb07fbc4135a534337394bf3c7ce81ba2a6c4aead913752e Oct 03 09:04:40 crc kubenswrapper[4899]: I1003 09:04:40.225664 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tskjn"] Oct 03 09:04:40 crc kubenswrapper[4899]: I1003 09:04:40.299951 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tskjn" event={"ID":"521016b7-078a-42dd-bec6-739da052031b","Type":"ContainerStarted","Data":"12e765c17e0a679ffb07fbc4135a534337394bf3c7ce81ba2a6c4aead913752e"} Oct 03 09:04:41 crc kubenswrapper[4899]: I1003 09:04:41.310234 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tskjn" event={"ID":"521016b7-078a-42dd-bec6-739da052031b","Type":"ContainerStarted","Data":"f12da6b54583d06646ecc25ac9d8ce5c473278694b0f98d71f272d83b76fb45d"} Oct 03 09:04:41 crc kubenswrapper[4899]: I1003 09:04:41.325186 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tskjn" podStartSLOduration=1.526435709 podStartE2EDuration="2.325169844s" podCreationTimestamp="2025-10-03 09:04:39 +0000 UTC" firstStartedPulling="2025-10-03 09:04:40.224579583 +0000 UTC m=+1454.332064536" lastFinishedPulling="2025-10-03 09:04:41.023313728 +0000 UTC m=+1455.130798671" observedRunningTime="2025-10-03 09:04:41.324443571 +0000 UTC m=+1455.431928524" watchObservedRunningTime="2025-10-03 09:04:41.325169844 +0000 UTC m=+1455.432654797" Oct 03 09:04:41 crc kubenswrapper[4899]: I1003 09:04:41.408636 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bgmpm" Oct 03 09:04:41 crc kubenswrapper[4899]: I1003 09:04:41.408685 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bgmpm" Oct 03 09:04:41 crc kubenswrapper[4899]: I1003 09:04:41.457332 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bgmpm" Oct 03 09:04:42 crc kubenswrapper[4899]: I1003 09:04:42.198824 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 09:04:42 crc kubenswrapper[4899]: I1003 09:04:42.199152 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 09:04:42 crc kubenswrapper[4899]: I1003 09:04:42.368107 
4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bgmpm" Oct 03 09:04:42 crc kubenswrapper[4899]: I1003 09:04:42.415075 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bgmpm"] Oct 03 09:04:44 crc kubenswrapper[4899]: I1003 09:04:44.338305 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-bgmpm" podUID="c117d51d-dc35-418c-97bd-3eafdb30385e" containerName="registry-server" containerID="cri-o://e525f745f6d1101decc60300e3fd2d5fe264a364a274ded61581a753361e55aa" gracePeriod=2 Oct 03 09:04:44 crc kubenswrapper[4899]: I1003 09:04:44.832740 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bgmpm" Oct 03 09:04:44 crc kubenswrapper[4899]: I1003 09:04:44.859855 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xsmmb\" (UniqueName: \"kubernetes.io/projected/c117d51d-dc35-418c-97bd-3eafdb30385e-kube-api-access-xsmmb\") pod \"c117d51d-dc35-418c-97bd-3eafdb30385e\" (UID: \"c117d51d-dc35-418c-97bd-3eafdb30385e\") " Oct 03 09:04:44 crc kubenswrapper[4899]: I1003 09:04:44.860174 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c117d51d-dc35-418c-97bd-3eafdb30385e-catalog-content\") pod \"c117d51d-dc35-418c-97bd-3eafdb30385e\" (UID: \"c117d51d-dc35-418c-97bd-3eafdb30385e\") " Oct 03 09:04:44 crc kubenswrapper[4899]: I1003 09:04:44.860417 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c117d51d-dc35-418c-97bd-3eafdb30385e-utilities\") pod \"c117d51d-dc35-418c-97bd-3eafdb30385e\" (UID: \"c117d51d-dc35-418c-97bd-3eafdb30385e\") " Oct 03 09:04:44 crc kubenswrapper[4899]: I1003 09:04:44.861244 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c117d51d-dc35-418c-97bd-3eafdb30385e-utilities" (OuterVolumeSpecName: "utilities") pod "c117d51d-dc35-418c-97bd-3eafdb30385e" (UID: "c117d51d-dc35-418c-97bd-3eafdb30385e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:04:44 crc kubenswrapper[4899]: I1003 09:04:44.871128 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c117d51d-dc35-418c-97bd-3eafdb30385e-kube-api-access-xsmmb" (OuterVolumeSpecName: "kube-api-access-xsmmb") pod "c117d51d-dc35-418c-97bd-3eafdb30385e" (UID: "c117d51d-dc35-418c-97bd-3eafdb30385e"). InnerVolumeSpecName "kube-api-access-xsmmb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:04:44 crc kubenswrapper[4899]: I1003 09:04:44.915696 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c117d51d-dc35-418c-97bd-3eafdb30385e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c117d51d-dc35-418c-97bd-3eafdb30385e" (UID: "c117d51d-dc35-418c-97bd-3eafdb30385e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:04:44 crc kubenswrapper[4899]: I1003 09:04:44.963872 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c117d51d-dc35-418c-97bd-3eafdb30385e-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 09:04:44 crc kubenswrapper[4899]: I1003 09:04:44.963937 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c117d51d-dc35-418c-97bd-3eafdb30385e-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 09:04:44 crc kubenswrapper[4899]: I1003 09:04:44.963951 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xsmmb\" (UniqueName: \"kubernetes.io/projected/c117d51d-dc35-418c-97bd-3eafdb30385e-kube-api-access-xsmmb\") on node \"crc\" DevicePath \"\"" Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.309601 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4dcb8"] Oct 03 09:04:45 crc kubenswrapper[4899]: E1003 09:04:45.310094 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c117d51d-dc35-418c-97bd-3eafdb30385e" containerName="extract-content" Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.310116 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="c117d51d-dc35-418c-97bd-3eafdb30385e" containerName="extract-content" Oct 03 09:04:45 crc kubenswrapper[4899]: E1003 09:04:45.310151 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c117d51d-dc35-418c-97bd-3eafdb30385e" containerName="extract-utilities" Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.310159 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="c117d51d-dc35-418c-97bd-3eafdb30385e" containerName="extract-utilities" Oct 03 09:04:45 crc kubenswrapper[4899]: E1003 09:04:45.310172 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c117d51d-dc35-418c-97bd-3eafdb30385e" containerName="registry-server" Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.310179 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="c117d51d-dc35-418c-97bd-3eafdb30385e" containerName="registry-server" Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.310383 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="c117d51d-dc35-418c-97bd-3eafdb30385e" containerName="registry-server" Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.312063 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4dcb8" Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.319222 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4dcb8"] Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.355821 4899 generic.go:334] "Generic (PLEG): container finished" podID="c117d51d-dc35-418c-97bd-3eafdb30385e" containerID="e525f745f6d1101decc60300e3fd2d5fe264a364a274ded61581a753361e55aa" exitCode=0 Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.355878 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bgmpm" event={"ID":"c117d51d-dc35-418c-97bd-3eafdb30385e","Type":"ContainerDied","Data":"e525f745f6d1101decc60300e3fd2d5fe264a364a274ded61581a753361e55aa"} Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.355920 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bgmpm" event={"ID":"c117d51d-dc35-418c-97bd-3eafdb30385e","Type":"ContainerDied","Data":"3999d9d519ae02dd8f3bbcdb6fdf1c109552bea16cf8adcc1b91daf3f8f5ca3d"} Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.355938 4899 scope.go:117] "RemoveContainer" containerID="e525f745f6d1101decc60300e3fd2d5fe264a364a274ded61581a753361e55aa" Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.355963 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bgmpm" Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.375344 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7fmz\" (UniqueName: \"kubernetes.io/projected/76024a34-58e2-48db-afd5-ccda76bc4386-kube-api-access-p7fmz\") pod \"redhat-operators-4dcb8\" (UID: \"76024a34-58e2-48db-afd5-ccda76bc4386\") " pod="openshift-marketplace/redhat-operators-4dcb8" Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.375490 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76024a34-58e2-48db-afd5-ccda76bc4386-utilities\") pod \"redhat-operators-4dcb8\" (UID: \"76024a34-58e2-48db-afd5-ccda76bc4386\") " pod="openshift-marketplace/redhat-operators-4dcb8" Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.375660 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76024a34-58e2-48db-afd5-ccda76bc4386-catalog-content\") pod \"redhat-operators-4dcb8\" (UID: \"76024a34-58e2-48db-afd5-ccda76bc4386\") " pod="openshift-marketplace/redhat-operators-4dcb8" Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.389543 4899 scope.go:117] "RemoveContainer" containerID="82e74537325b36513a131ce49245ef70e8d9bfac00f258b936b8b6ecb60c45f6" Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.407585 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bgmpm"] Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.417929 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-bgmpm"] Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.432278 4899 scope.go:117] "RemoveContainer" containerID="d89419d8e3758485b3aa9d36e1a4d6cba0c21a90676aade5914d17e08a26c891" Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.464098 4899 scope.go:117] 
"RemoveContainer" containerID="e525f745f6d1101decc60300e3fd2d5fe264a364a274ded61581a753361e55aa" Oct 03 09:04:45 crc kubenswrapper[4899]: E1003 09:04:45.464569 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e525f745f6d1101decc60300e3fd2d5fe264a364a274ded61581a753361e55aa\": container with ID starting with e525f745f6d1101decc60300e3fd2d5fe264a364a274ded61581a753361e55aa not found: ID does not exist" containerID="e525f745f6d1101decc60300e3fd2d5fe264a364a274ded61581a753361e55aa" Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.464610 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e525f745f6d1101decc60300e3fd2d5fe264a364a274ded61581a753361e55aa"} err="failed to get container status \"e525f745f6d1101decc60300e3fd2d5fe264a364a274ded61581a753361e55aa\": rpc error: code = NotFound desc = could not find container \"e525f745f6d1101decc60300e3fd2d5fe264a364a274ded61581a753361e55aa\": container with ID starting with e525f745f6d1101decc60300e3fd2d5fe264a364a274ded61581a753361e55aa not found: ID does not exist" Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.464638 4899 scope.go:117] "RemoveContainer" containerID="82e74537325b36513a131ce49245ef70e8d9bfac00f258b936b8b6ecb60c45f6" Oct 03 09:04:45 crc kubenswrapper[4899]: E1003 09:04:45.465185 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"82e74537325b36513a131ce49245ef70e8d9bfac00f258b936b8b6ecb60c45f6\": container with ID starting with 82e74537325b36513a131ce49245ef70e8d9bfac00f258b936b8b6ecb60c45f6 not found: ID does not exist" containerID="82e74537325b36513a131ce49245ef70e8d9bfac00f258b936b8b6ecb60c45f6" Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.465242 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"82e74537325b36513a131ce49245ef70e8d9bfac00f258b936b8b6ecb60c45f6"} err="failed to get container status \"82e74537325b36513a131ce49245ef70e8d9bfac00f258b936b8b6ecb60c45f6\": rpc error: code = NotFound desc = could not find container \"82e74537325b36513a131ce49245ef70e8d9bfac00f258b936b8b6ecb60c45f6\": container with ID starting with 82e74537325b36513a131ce49245ef70e8d9bfac00f258b936b8b6ecb60c45f6 not found: ID does not exist" Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.465261 4899 scope.go:117] "RemoveContainer" containerID="d89419d8e3758485b3aa9d36e1a4d6cba0c21a90676aade5914d17e08a26c891" Oct 03 09:04:45 crc kubenswrapper[4899]: E1003 09:04:45.465559 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d89419d8e3758485b3aa9d36e1a4d6cba0c21a90676aade5914d17e08a26c891\": container with ID starting with d89419d8e3758485b3aa9d36e1a4d6cba0c21a90676aade5914d17e08a26c891 not found: ID does not exist" containerID="d89419d8e3758485b3aa9d36e1a4d6cba0c21a90676aade5914d17e08a26c891" Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.465595 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d89419d8e3758485b3aa9d36e1a4d6cba0c21a90676aade5914d17e08a26c891"} err="failed to get container status \"d89419d8e3758485b3aa9d36e1a4d6cba0c21a90676aade5914d17e08a26c891\": rpc error: code = NotFound desc = could not find container \"d89419d8e3758485b3aa9d36e1a4d6cba0c21a90676aade5914d17e08a26c891\": container with ID starting with 
d89419d8e3758485b3aa9d36e1a4d6cba0c21a90676aade5914d17e08a26c891 not found: ID does not exist" Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.477436 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76024a34-58e2-48db-afd5-ccda76bc4386-catalog-content\") pod \"redhat-operators-4dcb8\" (UID: \"76024a34-58e2-48db-afd5-ccda76bc4386\") " pod="openshift-marketplace/redhat-operators-4dcb8" Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.477584 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7fmz\" (UniqueName: \"kubernetes.io/projected/76024a34-58e2-48db-afd5-ccda76bc4386-kube-api-access-p7fmz\") pod \"redhat-operators-4dcb8\" (UID: \"76024a34-58e2-48db-afd5-ccda76bc4386\") " pod="openshift-marketplace/redhat-operators-4dcb8" Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.477621 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76024a34-58e2-48db-afd5-ccda76bc4386-utilities\") pod \"redhat-operators-4dcb8\" (UID: \"76024a34-58e2-48db-afd5-ccda76bc4386\") " pod="openshift-marketplace/redhat-operators-4dcb8" Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.478059 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76024a34-58e2-48db-afd5-ccda76bc4386-catalog-content\") pod \"redhat-operators-4dcb8\" (UID: \"76024a34-58e2-48db-afd5-ccda76bc4386\") " pod="openshift-marketplace/redhat-operators-4dcb8" Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.478526 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76024a34-58e2-48db-afd5-ccda76bc4386-utilities\") pod \"redhat-operators-4dcb8\" (UID: \"76024a34-58e2-48db-afd5-ccda76bc4386\") " pod="openshift-marketplace/redhat-operators-4dcb8" Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.496552 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7fmz\" (UniqueName: \"kubernetes.io/projected/76024a34-58e2-48db-afd5-ccda76bc4386-kube-api-access-p7fmz\") pod \"redhat-operators-4dcb8\" (UID: \"76024a34-58e2-48db-afd5-ccda76bc4386\") " pod="openshift-marketplace/redhat-operators-4dcb8" Oct 03 09:04:45 crc kubenswrapper[4899]: I1003 09:04:45.633987 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4dcb8" Oct 03 09:04:46 crc kubenswrapper[4899]: I1003 09:04:46.143677 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4dcb8"] Oct 03 09:04:46 crc kubenswrapper[4899]: W1003 09:04:46.144370 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod76024a34_58e2_48db_afd5_ccda76bc4386.slice/crio-1bb66f350ee04ff2fe4098a00f864af6c29b998b696a4ed8be17ef021bd09952 WatchSource:0}: Error finding container 1bb66f350ee04ff2fe4098a00f864af6c29b998b696a4ed8be17ef021bd09952: Status 404 returned error can't find the container with id 1bb66f350ee04ff2fe4098a00f864af6c29b998b696a4ed8be17ef021bd09952 Oct 03 09:04:46 crc kubenswrapper[4899]: I1003 09:04:46.365133 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4dcb8" event={"ID":"76024a34-58e2-48db-afd5-ccda76bc4386","Type":"ContainerStarted","Data":"1bb66f350ee04ff2fe4098a00f864af6c29b998b696a4ed8be17ef021bd09952"} Oct 03 09:04:46 crc kubenswrapper[4899]: I1003 09:04:46.537740 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c117d51d-dc35-418c-97bd-3eafdb30385e" path="/var/lib/kubelet/pods/c117d51d-dc35-418c-97bd-3eafdb30385e/volumes" Oct 03 09:04:47 crc kubenswrapper[4899]: I1003 09:04:47.376445 4899 generic.go:334] "Generic (PLEG): container finished" podID="76024a34-58e2-48db-afd5-ccda76bc4386" containerID="b9a8afc9e827fcd656cf375a89f98a8e1702e05bbd093c371c2cde2ae01d1dbe" exitCode=0 Oct 03 09:04:47 crc kubenswrapper[4899]: I1003 09:04:47.376533 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4dcb8" event={"ID":"76024a34-58e2-48db-afd5-ccda76bc4386","Type":"ContainerDied","Data":"b9a8afc9e827fcd656cf375a89f98a8e1702e05bbd093c371c2cde2ae01d1dbe"} Oct 03 09:04:49 crc kubenswrapper[4899]: I1003 09:04:49.396804 4899 generic.go:334] "Generic (PLEG): container finished" podID="76024a34-58e2-48db-afd5-ccda76bc4386" containerID="3cd21a7035c87307d8934b3c2dd69c76c1cd9b9999445b2d19f4e3922497851c" exitCode=0 Oct 03 09:04:49 crc kubenswrapper[4899]: I1003 09:04:49.396876 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4dcb8" event={"ID":"76024a34-58e2-48db-afd5-ccda76bc4386","Type":"ContainerDied","Data":"3cd21a7035c87307d8934b3c2dd69c76c1cd9b9999445b2d19f4e3922497851c"} Oct 03 09:04:55 crc kubenswrapper[4899]: I1003 09:04:55.464208 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4dcb8" event={"ID":"76024a34-58e2-48db-afd5-ccda76bc4386","Type":"ContainerStarted","Data":"b776055ff49a882cc6e2f239c315f189457098649f53278513dc8acecc3bb7ac"} Oct 03 09:04:55 crc kubenswrapper[4899]: I1003 09:04:55.486852 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4dcb8" podStartSLOduration=3.359983242 podStartE2EDuration="10.486829145s" podCreationTimestamp="2025-10-03 09:04:45 +0000 UTC" firstStartedPulling="2025-10-03 09:04:47.378432439 +0000 UTC m=+1461.485917392" lastFinishedPulling="2025-10-03 09:04:54.505278342 +0000 UTC m=+1468.612763295" observedRunningTime="2025-10-03 09:04:55.482366424 +0000 UTC m=+1469.589851377" watchObservedRunningTime="2025-10-03 09:04:55.486829145 +0000 UTC m=+1469.594314098" Oct 03 09:04:55 crc kubenswrapper[4899]: I1003 09:04:55.634375 4899 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4dcb8" Oct 03 09:04:55 crc kubenswrapper[4899]: I1003 09:04:55.634444 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4dcb8" Oct 03 09:04:56 crc kubenswrapper[4899]: I1003 09:04:56.680221 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-4dcb8" podUID="76024a34-58e2-48db-afd5-ccda76bc4386" containerName="registry-server" probeResult="failure" output=< Oct 03 09:04:56 crc kubenswrapper[4899]: timeout: failed to connect service ":50051" within 1s Oct 03 09:04:56 crc kubenswrapper[4899]: > Oct 03 09:05:05 crc kubenswrapper[4899]: I1003 09:05:05.680925 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4dcb8" Oct 03 09:05:05 crc kubenswrapper[4899]: I1003 09:05:05.731653 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4dcb8" Oct 03 09:05:05 crc kubenswrapper[4899]: I1003 09:05:05.934876 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4dcb8"] Oct 03 09:05:07 crc kubenswrapper[4899]: I1003 09:05:07.562303 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4dcb8" podUID="76024a34-58e2-48db-afd5-ccda76bc4386" containerName="registry-server" containerID="cri-o://b776055ff49a882cc6e2f239c315f189457098649f53278513dc8acecc3bb7ac" gracePeriod=2 Oct 03 09:05:08 crc kubenswrapper[4899]: I1003 09:05:08.521873 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4dcb8" Oct 03 09:05:08 crc kubenswrapper[4899]: I1003 09:05:08.571999 4899 generic.go:334] "Generic (PLEG): container finished" podID="76024a34-58e2-48db-afd5-ccda76bc4386" containerID="b776055ff49a882cc6e2f239c315f189457098649f53278513dc8acecc3bb7ac" exitCode=0 Oct 03 09:05:08 crc kubenswrapper[4899]: I1003 09:05:08.572035 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4dcb8" event={"ID":"76024a34-58e2-48db-afd5-ccda76bc4386","Type":"ContainerDied","Data":"b776055ff49a882cc6e2f239c315f189457098649f53278513dc8acecc3bb7ac"} Oct 03 09:05:08 crc kubenswrapper[4899]: I1003 09:05:08.572064 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4dcb8" Oct 03 09:05:08 crc kubenswrapper[4899]: I1003 09:05:08.572079 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4dcb8" event={"ID":"76024a34-58e2-48db-afd5-ccda76bc4386","Type":"ContainerDied","Data":"1bb66f350ee04ff2fe4098a00f864af6c29b998b696a4ed8be17ef021bd09952"} Oct 03 09:05:08 crc kubenswrapper[4899]: I1003 09:05:08.572100 4899 scope.go:117] "RemoveContainer" containerID="b776055ff49a882cc6e2f239c315f189457098649f53278513dc8acecc3bb7ac" Oct 03 09:05:08 crc kubenswrapper[4899]: I1003 09:05:08.601727 4899 scope.go:117] "RemoveContainer" containerID="3cd21a7035c87307d8934b3c2dd69c76c1cd9b9999445b2d19f4e3922497851c" Oct 03 09:05:08 crc kubenswrapper[4899]: I1003 09:05:08.614986 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76024a34-58e2-48db-afd5-ccda76bc4386-utilities\") pod \"76024a34-58e2-48db-afd5-ccda76bc4386\" (UID: \"76024a34-58e2-48db-afd5-ccda76bc4386\") " Oct 03 09:05:08 crc kubenswrapper[4899]: I1003 09:05:08.615046 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76024a34-58e2-48db-afd5-ccda76bc4386-catalog-content\") pod \"76024a34-58e2-48db-afd5-ccda76bc4386\" (UID: \"76024a34-58e2-48db-afd5-ccda76bc4386\") " Oct 03 09:05:08 crc kubenswrapper[4899]: I1003 09:05:08.615297 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p7fmz\" (UniqueName: \"kubernetes.io/projected/76024a34-58e2-48db-afd5-ccda76bc4386-kube-api-access-p7fmz\") pod \"76024a34-58e2-48db-afd5-ccda76bc4386\" (UID: \"76024a34-58e2-48db-afd5-ccda76bc4386\") " Oct 03 09:05:08 crc kubenswrapper[4899]: I1003 09:05:08.617171 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76024a34-58e2-48db-afd5-ccda76bc4386-utilities" (OuterVolumeSpecName: "utilities") pod "76024a34-58e2-48db-afd5-ccda76bc4386" (UID: "76024a34-58e2-48db-afd5-ccda76bc4386"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:05:08 crc kubenswrapper[4899]: I1003 09:05:08.628636 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76024a34-58e2-48db-afd5-ccda76bc4386-kube-api-access-p7fmz" (OuterVolumeSpecName: "kube-api-access-p7fmz") pod "76024a34-58e2-48db-afd5-ccda76bc4386" (UID: "76024a34-58e2-48db-afd5-ccda76bc4386"). InnerVolumeSpecName "kube-api-access-p7fmz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:05:08 crc kubenswrapper[4899]: I1003 09:05:08.634126 4899 scope.go:117] "RemoveContainer" containerID="b9a8afc9e827fcd656cf375a89f98a8e1702e05bbd093c371c2cde2ae01d1dbe" Oct 03 09:05:08 crc kubenswrapper[4899]: I1003 09:05:08.703120 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76024a34-58e2-48db-afd5-ccda76bc4386-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "76024a34-58e2-48db-afd5-ccda76bc4386" (UID: "76024a34-58e2-48db-afd5-ccda76bc4386"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:05:08 crc kubenswrapper[4899]: I1003 09:05:08.704198 4899 scope.go:117] "RemoveContainer" containerID="b776055ff49a882cc6e2f239c315f189457098649f53278513dc8acecc3bb7ac" Oct 03 09:05:08 crc kubenswrapper[4899]: E1003 09:05:08.704628 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b776055ff49a882cc6e2f239c315f189457098649f53278513dc8acecc3bb7ac\": container with ID starting with b776055ff49a882cc6e2f239c315f189457098649f53278513dc8acecc3bb7ac not found: ID does not exist" containerID="b776055ff49a882cc6e2f239c315f189457098649f53278513dc8acecc3bb7ac" Oct 03 09:05:08 crc kubenswrapper[4899]: I1003 09:05:08.704657 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b776055ff49a882cc6e2f239c315f189457098649f53278513dc8acecc3bb7ac"} err="failed to get container status \"b776055ff49a882cc6e2f239c315f189457098649f53278513dc8acecc3bb7ac\": rpc error: code = NotFound desc = could not find container \"b776055ff49a882cc6e2f239c315f189457098649f53278513dc8acecc3bb7ac\": container with ID starting with b776055ff49a882cc6e2f239c315f189457098649f53278513dc8acecc3bb7ac not found: ID does not exist" Oct 03 09:05:08 crc kubenswrapper[4899]: I1003 09:05:08.704678 4899 scope.go:117] "RemoveContainer" containerID="3cd21a7035c87307d8934b3c2dd69c76c1cd9b9999445b2d19f4e3922497851c" Oct 03 09:05:08 crc kubenswrapper[4899]: E1003 09:05:08.705116 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3cd21a7035c87307d8934b3c2dd69c76c1cd9b9999445b2d19f4e3922497851c\": container with ID starting with 3cd21a7035c87307d8934b3c2dd69c76c1cd9b9999445b2d19f4e3922497851c not found: ID does not exist" containerID="3cd21a7035c87307d8934b3c2dd69c76c1cd9b9999445b2d19f4e3922497851c" Oct 03 09:05:08 crc kubenswrapper[4899]: I1003 09:05:08.705136 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cd21a7035c87307d8934b3c2dd69c76c1cd9b9999445b2d19f4e3922497851c"} err="failed to get container status \"3cd21a7035c87307d8934b3c2dd69c76c1cd9b9999445b2d19f4e3922497851c\": rpc error: code = NotFound desc = could not find container \"3cd21a7035c87307d8934b3c2dd69c76c1cd9b9999445b2d19f4e3922497851c\": container with ID starting with 3cd21a7035c87307d8934b3c2dd69c76c1cd9b9999445b2d19f4e3922497851c not found: ID does not exist" Oct 03 09:05:08 crc kubenswrapper[4899]: I1003 09:05:08.705167 4899 scope.go:117] "RemoveContainer" containerID="b9a8afc9e827fcd656cf375a89f98a8e1702e05bbd093c371c2cde2ae01d1dbe" Oct 03 09:05:08 crc kubenswrapper[4899]: E1003 09:05:08.705487 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9a8afc9e827fcd656cf375a89f98a8e1702e05bbd093c371c2cde2ae01d1dbe\": container with ID starting with b9a8afc9e827fcd656cf375a89f98a8e1702e05bbd093c371c2cde2ae01d1dbe not found: ID does not exist" containerID="b9a8afc9e827fcd656cf375a89f98a8e1702e05bbd093c371c2cde2ae01d1dbe" Oct 03 09:05:08 crc kubenswrapper[4899]: I1003 09:05:08.705515 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9a8afc9e827fcd656cf375a89f98a8e1702e05bbd093c371c2cde2ae01d1dbe"} err="failed to get container status \"b9a8afc9e827fcd656cf375a89f98a8e1702e05bbd093c371c2cde2ae01d1dbe\": rpc error: code = NotFound desc = could not 
find container \"b9a8afc9e827fcd656cf375a89f98a8e1702e05bbd093c371c2cde2ae01d1dbe\": container with ID starting with b9a8afc9e827fcd656cf375a89f98a8e1702e05bbd093c371c2cde2ae01d1dbe not found: ID does not exist" Oct 03 09:05:08 crc kubenswrapper[4899]: I1003 09:05:08.717042 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p7fmz\" (UniqueName: \"kubernetes.io/projected/76024a34-58e2-48db-afd5-ccda76bc4386-kube-api-access-p7fmz\") on node \"crc\" DevicePath \"\"" Oct 03 09:05:08 crc kubenswrapper[4899]: I1003 09:05:08.717115 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76024a34-58e2-48db-afd5-ccda76bc4386-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 09:05:08 crc kubenswrapper[4899]: I1003 09:05:08.717125 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76024a34-58e2-48db-afd5-ccda76bc4386-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 09:05:08 crc kubenswrapper[4899]: I1003 09:05:08.904791 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4dcb8"] Oct 03 09:05:08 crc kubenswrapper[4899]: I1003 09:05:08.913681 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4dcb8"] Oct 03 09:05:10 crc kubenswrapper[4899]: I1003 09:05:10.536954 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76024a34-58e2-48db-afd5-ccda76bc4386" path="/var/lib/kubelet/pods/76024a34-58e2-48db-afd5-ccda76bc4386/volumes" Oct 03 09:05:12 crc kubenswrapper[4899]: I1003 09:05:12.198033 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 09:05:12 crc kubenswrapper[4899]: I1003 09:05:12.198366 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 09:05:34 crc kubenswrapper[4899]: I1003 09:05:34.041510 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-vhnnx"] Oct 03 09:05:34 crc kubenswrapper[4899]: I1003 09:05:34.052348 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-vhnnx"] Oct 03 09:05:34 crc kubenswrapper[4899]: I1003 09:05:34.541303 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dcceacb3-5a24-43fc-b90b-036ae2fd715a" path="/var/lib/kubelet/pods/dcceacb3-5a24-43fc-b90b-036ae2fd715a/volumes" Oct 03 09:05:35 crc kubenswrapper[4899]: I1003 09:05:35.037672 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-5l7x5"] Oct 03 09:05:35 crc kubenswrapper[4899]: I1003 09:05:35.050131 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-kjd74"] Oct 03 09:05:35 crc kubenswrapper[4899]: I1003 09:05:35.058114 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-kjd74"] Oct 03 09:05:35 crc kubenswrapper[4899]: I1003 09:05:35.068272 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/glance-db-create-5l7x5"] Oct 03 09:05:36 crc kubenswrapper[4899]: I1003 09:05:36.539208 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16d9f858-ae62-4d37-b2de-97fb7d70d6a3" path="/var/lib/kubelet/pods/16d9f858-ae62-4d37-b2de-97fb7d70d6a3/volumes" Oct 03 09:05:36 crc kubenswrapper[4899]: I1003 09:05:36.540049 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="381e208b-07bc-42e2-88b1-99ae1ae4f907" path="/var/lib/kubelet/pods/381e208b-07bc-42e2-88b1-99ae1ae4f907/volumes" Oct 03 09:05:42 crc kubenswrapper[4899]: I1003 09:05:42.050343 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-1db7-account-create-jljht"] Oct 03 09:05:42 crc kubenswrapper[4899]: I1003 09:05:42.059308 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-1db7-account-create-jljht"] Oct 03 09:05:42 crc kubenswrapper[4899]: I1003 09:05:42.198013 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 09:05:42 crc kubenswrapper[4899]: I1003 09:05:42.198071 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 09:05:42 crc kubenswrapper[4899]: I1003 09:05:42.198117 4899 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 09:05:42 crc kubenswrapper[4899]: I1003 09:05:42.198813 4899 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b"} pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 09:05:42 crc kubenswrapper[4899]: I1003 09:05:42.198877 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" containerID="cri-o://7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b" gracePeriod=600 Oct 03 09:05:42 crc kubenswrapper[4899]: E1003 09:05:42.361195 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:05:42 crc kubenswrapper[4899]: I1003 09:05:42.538418 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61fd0b37-2c03-4fde-a030-84387c3a10c0" path="/var/lib/kubelet/pods/61fd0b37-2c03-4fde-a030-84387c3a10c0/volumes" Oct 03 09:05:42 crc kubenswrapper[4899]: I1003 09:05:42.851410 4899 generic.go:334] "Generic (PLEG): container finished" 
podID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerID="7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b" exitCode=0 Oct 03 09:05:42 crc kubenswrapper[4899]: I1003 09:05:42.851465 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerDied","Data":"7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b"} Oct 03 09:05:42 crc kubenswrapper[4899]: I1003 09:05:42.851506 4899 scope.go:117] "RemoveContainer" containerID="def1a55ad00f7229a571d38a4260404a613179a181d031abe241127ae4349c21" Oct 03 09:05:42 crc kubenswrapper[4899]: I1003 09:05:42.852258 4899 scope.go:117] "RemoveContainer" containerID="7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b" Oct 03 09:05:42 crc kubenswrapper[4899]: E1003 09:05:42.852522 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:05:47 crc kubenswrapper[4899]: I1003 09:05:47.986843 4899 scope.go:117] "RemoveContainer" containerID="047b06b08e76bba97016860a234f15127b5c44f6bb356006139af0faf0ff868f" Oct 03 09:05:48 crc kubenswrapper[4899]: I1003 09:05:48.008237 4899 scope.go:117] "RemoveContainer" containerID="a060b39b0ba8ee23d110bfe8b99d60fbb2b8beb754ebf9e2a56d6d22b341f980" Oct 03 09:05:48 crc kubenswrapper[4899]: I1003 09:05:48.056754 4899 scope.go:117] "RemoveContainer" containerID="69e306ba4bb82944f7302882da65f0fb73befd12eba48c6160a6985e1568a072" Oct 03 09:05:48 crc kubenswrapper[4899]: I1003 09:05:48.106613 4899 scope.go:117] "RemoveContainer" containerID="150fc358dde7215bc721566308068d85fc9d654342f7f6f39fdea20229ed9fd3" Oct 03 09:05:53 crc kubenswrapper[4899]: I1003 09:05:53.526931 4899 scope.go:117] "RemoveContainer" containerID="7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b" Oct 03 09:05:53 crc kubenswrapper[4899]: E1003 09:05:53.527835 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:05:56 crc kubenswrapper[4899]: I1003 09:05:56.036971 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-c734-account-create-f7xq6"] Oct 03 09:05:56 crc kubenswrapper[4899]: I1003 09:05:56.048954 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-d5d9-account-create-xkb58"] Oct 03 09:05:56 crc kubenswrapper[4899]: I1003 09:05:56.058789 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-d5d9-account-create-xkb58"] Oct 03 09:05:56 crc kubenswrapper[4899]: I1003 09:05:56.068580 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-c734-account-create-f7xq6"] Oct 03 09:05:56 crc kubenswrapper[4899]: I1003 09:05:56.539475 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="2fd15087-4144-4272-9136-1537ce09cde1" path="/var/lib/kubelet/pods/2fd15087-4144-4272-9136-1537ce09cde1/volumes" Oct 03 09:05:56 crc kubenswrapper[4899]: I1003 09:05:56.540036 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9d2a024-e6cb-459b-baaa-98e13e6c46f2" path="/var/lib/kubelet/pods/b9d2a024-e6cb-459b-baaa-98e13e6c46f2/volumes" Oct 03 09:05:59 crc kubenswrapper[4899]: I1003 09:05:59.044761 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-p88zw"] Oct 03 09:05:59 crc kubenswrapper[4899]: I1003 09:05:59.055677 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-bb2l9"] Oct 03 09:05:59 crc kubenswrapper[4899]: I1003 09:05:59.066202 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-sd9hr"] Oct 03 09:05:59 crc kubenswrapper[4899]: I1003 09:05:59.076476 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-p88zw"] Oct 03 09:05:59 crc kubenswrapper[4899]: I1003 09:05:59.087506 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-bb2l9"] Oct 03 09:05:59 crc kubenswrapper[4899]: I1003 09:05:59.097418 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-sd9hr"] Oct 03 09:06:00 crc kubenswrapper[4899]: I1003 09:06:00.541573 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="048b8d8c-402b-4ab3-bda5-847c2f8f6d9c" path="/var/lib/kubelet/pods/048b8d8c-402b-4ab3-bda5-847c2f8f6d9c/volumes" Oct 03 09:06:00 crc kubenswrapper[4899]: I1003 09:06:00.542909 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d60b9a27-3601-451c-8e43-4329e0bd9a78" path="/var/lib/kubelet/pods/d60b9a27-3601-451c-8e43-4329e0bd9a78/volumes" Oct 03 09:06:00 crc kubenswrapper[4899]: I1003 09:06:00.543410 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de7dc4fe-1a51-4238-a8bf-3651a6cd28bc" path="/var/lib/kubelet/pods/de7dc4fe-1a51-4238-a8bf-3651a6cd28bc/volumes" Oct 03 09:06:06 crc kubenswrapper[4899]: I1003 09:06:06.030963 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-r7dz5"] Oct 03 09:06:06 crc kubenswrapper[4899]: I1003 09:06:06.043570 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-r7dz5"] Oct 03 09:06:06 crc kubenswrapper[4899]: I1003 09:06:06.533179 4899 scope.go:117] "RemoveContainer" containerID="7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b" Oct 03 09:06:06 crc kubenswrapper[4899]: E1003 09:06:06.533446 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:06:06 crc kubenswrapper[4899]: I1003 09:06:06.539844 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e95726c5-31b4-47eb-9e32-eec52266c460" path="/var/lib/kubelet/pods/e95726c5-31b4-47eb-9e32-eec52266c460/volumes" Oct 03 09:06:09 crc kubenswrapper[4899]: I1003 09:06:09.044824 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-581b-account-create-tbmqr"] Oct 03 09:06:09 crc kubenswrapper[4899]: I1003 09:06:09.056374 4899 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-1c09-account-create-qnsmw"] Oct 03 09:06:09 crc kubenswrapper[4899]: I1003 09:06:09.065651 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-cff2w"] Oct 03 09:06:09 crc kubenswrapper[4899]: I1003 09:06:09.074558 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-10c1-account-create-jgc9k"] Oct 03 09:06:09 crc kubenswrapper[4899]: I1003 09:06:09.084522 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-581b-account-create-tbmqr"] Oct 03 09:06:09 crc kubenswrapper[4899]: I1003 09:06:09.093944 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-1c09-account-create-qnsmw"] Oct 03 09:06:09 crc kubenswrapper[4899]: I1003 09:06:09.103372 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-10c1-account-create-jgc9k"] Oct 03 09:06:09 crc kubenswrapper[4899]: I1003 09:06:09.111579 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-cff2w"] Oct 03 09:06:10 crc kubenswrapper[4899]: I1003 09:06:10.537690 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="237108cb-238e-4abc-a099-60c1ce478004" path="/var/lib/kubelet/pods/237108cb-238e-4abc-a099-60c1ce478004/volumes" Oct 03 09:06:10 crc kubenswrapper[4899]: I1003 09:06:10.538759 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a47a104-3fdb-4f24-8fd4-ec9b8b6dca8a" path="/var/lib/kubelet/pods/9a47a104-3fdb-4f24-8fd4-ec9b8b6dca8a/volumes" Oct 03 09:06:10 crc kubenswrapper[4899]: I1003 09:06:10.539402 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7891f02-e1ab-4c31-a210-971c66974961" path="/var/lib/kubelet/pods/a7891f02-e1ab-4c31-a210-971c66974961/volumes" Oct 03 09:06:10 crc kubenswrapper[4899]: I1003 09:06:10.540103 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9905523-1414-4f5e-958d-22ffc752c061" path="/var/lib/kubelet/pods/d9905523-1414-4f5e-958d-22ffc752c061/volumes" Oct 03 09:06:17 crc kubenswrapper[4899]: I1003 09:06:17.527574 4899 scope.go:117] "RemoveContainer" containerID="7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b" Oct 03 09:06:17 crc kubenswrapper[4899]: E1003 09:06:17.528607 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:06:21 crc kubenswrapper[4899]: I1003 09:06:21.223727 4899 generic.go:334] "Generic (PLEG): container finished" podID="521016b7-078a-42dd-bec6-739da052031b" containerID="f12da6b54583d06646ecc25ac9d8ce5c473278694b0f98d71f272d83b76fb45d" exitCode=0 Oct 03 09:06:21 crc kubenswrapper[4899]: I1003 09:06:21.223781 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tskjn" event={"ID":"521016b7-078a-42dd-bec6-739da052031b","Type":"ContainerDied","Data":"f12da6b54583d06646ecc25ac9d8ce5c473278694b0f98d71f272d83b76fb45d"} Oct 03 09:06:22 crc kubenswrapper[4899]: I1003 09:06:22.627131 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tskjn" Oct 03 09:06:22 crc kubenswrapper[4899]: I1003 09:06:22.728410 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zqpr5\" (UniqueName: \"kubernetes.io/projected/521016b7-078a-42dd-bec6-739da052031b-kube-api-access-zqpr5\") pod \"521016b7-078a-42dd-bec6-739da052031b\" (UID: \"521016b7-078a-42dd-bec6-739da052031b\") " Oct 03 09:06:22 crc kubenswrapper[4899]: I1003 09:06:22.728694 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/521016b7-078a-42dd-bec6-739da052031b-ssh-key\") pod \"521016b7-078a-42dd-bec6-739da052031b\" (UID: \"521016b7-078a-42dd-bec6-739da052031b\") " Oct 03 09:06:22 crc kubenswrapper[4899]: I1003 09:06:22.728974 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/521016b7-078a-42dd-bec6-739da052031b-inventory\") pod \"521016b7-078a-42dd-bec6-739da052031b\" (UID: \"521016b7-078a-42dd-bec6-739da052031b\") " Oct 03 09:06:22 crc kubenswrapper[4899]: I1003 09:06:22.736196 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/521016b7-078a-42dd-bec6-739da052031b-kube-api-access-zqpr5" (OuterVolumeSpecName: "kube-api-access-zqpr5") pod "521016b7-078a-42dd-bec6-739da052031b" (UID: "521016b7-078a-42dd-bec6-739da052031b"). InnerVolumeSpecName "kube-api-access-zqpr5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:06:22 crc kubenswrapper[4899]: I1003 09:06:22.758585 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/521016b7-078a-42dd-bec6-739da052031b-inventory" (OuterVolumeSpecName: "inventory") pod "521016b7-078a-42dd-bec6-739da052031b" (UID: "521016b7-078a-42dd-bec6-739da052031b"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:06:22 crc kubenswrapper[4899]: I1003 09:06:22.763061 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/521016b7-078a-42dd-bec6-739da052031b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "521016b7-078a-42dd-bec6-739da052031b" (UID: "521016b7-078a-42dd-bec6-739da052031b"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:06:22 crc kubenswrapper[4899]: I1003 09:06:22.831174 4899 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/521016b7-078a-42dd-bec6-739da052031b-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 09:06:22 crc kubenswrapper[4899]: I1003 09:06:22.831206 4899 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/521016b7-078a-42dd-bec6-739da052031b-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 09:06:22 crc kubenswrapper[4899]: I1003 09:06:22.831216 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zqpr5\" (UniqueName: \"kubernetes.io/projected/521016b7-078a-42dd-bec6-739da052031b-kube-api-access-zqpr5\") on node \"crc\" DevicePath \"\"" Oct 03 09:06:23 crc kubenswrapper[4899]: I1003 09:06:23.243200 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tskjn" event={"ID":"521016b7-078a-42dd-bec6-739da052031b","Type":"ContainerDied","Data":"12e765c17e0a679ffb07fbc4135a534337394bf3c7ce81ba2a6c4aead913752e"} Oct 03 09:06:23 crc kubenswrapper[4899]: I1003 09:06:23.243258 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="12e765c17e0a679ffb07fbc4135a534337394bf3c7ce81ba2a6c4aead913752e" Oct 03 09:06:23 crc kubenswrapper[4899]: I1003 09:06:23.243291 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tskjn" Oct 03 09:06:23 crc kubenswrapper[4899]: I1003 09:06:23.332906 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7l699"] Oct 03 09:06:23 crc kubenswrapper[4899]: E1003 09:06:23.333412 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="521016b7-078a-42dd-bec6-739da052031b" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Oct 03 09:06:23 crc kubenswrapper[4899]: I1003 09:06:23.333438 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="521016b7-078a-42dd-bec6-739da052031b" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Oct 03 09:06:23 crc kubenswrapper[4899]: E1003 09:06:23.333466 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76024a34-58e2-48db-afd5-ccda76bc4386" containerName="extract-content" Oct 03 09:06:23 crc kubenswrapper[4899]: I1003 09:06:23.333476 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="76024a34-58e2-48db-afd5-ccda76bc4386" containerName="extract-content" Oct 03 09:06:23 crc kubenswrapper[4899]: E1003 09:06:23.333492 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76024a34-58e2-48db-afd5-ccda76bc4386" containerName="extract-utilities" Oct 03 09:06:23 crc kubenswrapper[4899]: I1003 09:06:23.333500 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="76024a34-58e2-48db-afd5-ccda76bc4386" containerName="extract-utilities" Oct 03 09:06:23 crc kubenswrapper[4899]: E1003 09:06:23.333532 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76024a34-58e2-48db-afd5-ccda76bc4386" containerName="registry-server" Oct 03 09:06:23 crc kubenswrapper[4899]: I1003 09:06:23.333541 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="76024a34-58e2-48db-afd5-ccda76bc4386" containerName="registry-server" Oct 03 09:06:23 crc kubenswrapper[4899]: I1003 09:06:23.333750 4899 
memory_manager.go:354] "RemoveStaleState removing state" podUID="521016b7-078a-42dd-bec6-739da052031b" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Oct 03 09:06:23 crc kubenswrapper[4899]: I1003 09:06:23.333775 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="76024a34-58e2-48db-afd5-ccda76bc4386" containerName="registry-server" Oct 03 09:06:23 crc kubenswrapper[4899]: I1003 09:06:23.334594 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7l699" Oct 03 09:06:23 crc kubenswrapper[4899]: I1003 09:06:23.341047 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 09:06:23 crc kubenswrapper[4899]: I1003 09:06:23.342584 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 09:06:23 crc kubenswrapper[4899]: I1003 09:06:23.342849 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 09:06:23 crc kubenswrapper[4899]: I1003 09:06:23.343447 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pnmjv" Oct 03 09:06:23 crc kubenswrapper[4899]: I1003 09:06:23.349752 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7l699"] Oct 03 09:06:23 crc kubenswrapper[4899]: I1003 09:06:23.443226 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fef8b1ca-fe97-49df-9d53-89edfaa3d12a-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-7l699\" (UID: \"fef8b1ca-fe97-49df-9d53-89edfaa3d12a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7l699" Oct 03 09:06:23 crc kubenswrapper[4899]: I1003 09:06:23.443304 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fef8b1ca-fe97-49df-9d53-89edfaa3d12a-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-7l699\" (UID: \"fef8b1ca-fe97-49df-9d53-89edfaa3d12a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7l699" Oct 03 09:06:23 crc kubenswrapper[4899]: I1003 09:06:23.443493 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2mxz\" (UniqueName: \"kubernetes.io/projected/fef8b1ca-fe97-49df-9d53-89edfaa3d12a-kube-api-access-f2mxz\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-7l699\" (UID: \"fef8b1ca-fe97-49df-9d53-89edfaa3d12a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7l699" Oct 03 09:06:23 crc kubenswrapper[4899]: I1003 09:06:23.544710 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2mxz\" (UniqueName: \"kubernetes.io/projected/fef8b1ca-fe97-49df-9d53-89edfaa3d12a-kube-api-access-f2mxz\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-7l699\" (UID: \"fef8b1ca-fe97-49df-9d53-89edfaa3d12a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7l699" Oct 03 09:06:23 crc kubenswrapper[4899]: I1003 09:06:23.544821 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/fef8b1ca-fe97-49df-9d53-89edfaa3d12a-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-7l699\" (UID: \"fef8b1ca-fe97-49df-9d53-89edfaa3d12a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7l699" Oct 03 09:06:23 crc kubenswrapper[4899]: I1003 09:06:23.544847 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fef8b1ca-fe97-49df-9d53-89edfaa3d12a-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-7l699\" (UID: \"fef8b1ca-fe97-49df-9d53-89edfaa3d12a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7l699" Oct 03 09:06:23 crc kubenswrapper[4899]: I1003 09:06:23.548851 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fef8b1ca-fe97-49df-9d53-89edfaa3d12a-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-7l699\" (UID: \"fef8b1ca-fe97-49df-9d53-89edfaa3d12a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7l699" Oct 03 09:06:23 crc kubenswrapper[4899]: I1003 09:06:23.551230 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fef8b1ca-fe97-49df-9d53-89edfaa3d12a-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-7l699\" (UID: \"fef8b1ca-fe97-49df-9d53-89edfaa3d12a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7l699" Oct 03 09:06:23 crc kubenswrapper[4899]: I1003 09:06:23.562016 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2mxz\" (UniqueName: \"kubernetes.io/projected/fef8b1ca-fe97-49df-9d53-89edfaa3d12a-kube-api-access-f2mxz\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-7l699\" (UID: \"fef8b1ca-fe97-49df-9d53-89edfaa3d12a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7l699" Oct 03 09:06:23 crc kubenswrapper[4899]: I1003 09:06:23.664108 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7l699" Oct 03 09:06:24 crc kubenswrapper[4899]: I1003 09:06:24.154173 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7l699"] Oct 03 09:06:24 crc kubenswrapper[4899]: I1003 09:06:24.159287 4899 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 09:06:24 crc kubenswrapper[4899]: I1003 09:06:24.255068 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7l699" event={"ID":"fef8b1ca-fe97-49df-9d53-89edfaa3d12a","Type":"ContainerStarted","Data":"9c48f66d45fcb8058f338c0edd0e43c91cb7dace945ac5ec1326d77678d07c71"} Oct 03 09:06:25 crc kubenswrapper[4899]: I1003 09:06:25.266461 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7l699" event={"ID":"fef8b1ca-fe97-49df-9d53-89edfaa3d12a","Type":"ContainerStarted","Data":"ab44d12f7da5e5cbc979ac658602ebe59d2dc05bbacf4e50c46a980ee462c62a"} Oct 03 09:06:32 crc kubenswrapper[4899]: I1003 09:06:32.526590 4899 scope.go:117] "RemoveContainer" containerID="7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b" Oct 03 09:06:32 crc kubenswrapper[4899]: E1003 09:06:32.527323 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:06:42 crc kubenswrapper[4899]: I1003 09:06:42.031272 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7l699" podStartSLOduration=18.374733742 podStartE2EDuration="19.031256302s" podCreationTimestamp="2025-10-03 09:06:23 +0000 UTC" firstStartedPulling="2025-10-03 09:06:24.159082374 +0000 UTC m=+1558.266567327" lastFinishedPulling="2025-10-03 09:06:24.815604934 +0000 UTC m=+1558.923089887" observedRunningTime="2025-10-03 09:06:25.283395424 +0000 UTC m=+1559.390880377" watchObservedRunningTime="2025-10-03 09:06:42.031256302 +0000 UTC m=+1576.138741255" Oct 03 09:06:42 crc kubenswrapper[4899]: I1003 09:06:42.042428 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-rh6dp"] Oct 03 09:06:42 crc kubenswrapper[4899]: I1003 09:06:42.053090 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-rh6dp"] Oct 03 09:06:42 crc kubenswrapper[4899]: I1003 09:06:42.538878 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2decdc4-50c4-4370-8551-fe73d6918bcd" path="/var/lib/kubelet/pods/c2decdc4-50c4-4370-8551-fe73d6918bcd/volumes" Oct 03 09:06:43 crc kubenswrapper[4899]: I1003 09:06:43.527451 4899 scope.go:117] "RemoveContainer" containerID="7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b" Oct 03 09:06:43 crc kubenswrapper[4899]: E1003 09:06:43.528011 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:06:48 crc kubenswrapper[4899]: I1003 09:06:48.256399 4899 scope.go:117] "RemoveContainer" containerID="12ff563a9c461311dacf4abc2193e35815a9dccd835f6edfe2ea00f82baecc3a" Oct 03 09:06:48 crc kubenswrapper[4899]: I1003 09:06:48.321114 4899 scope.go:117] "RemoveContainer" containerID="dc1d8f34db4c41a1dd1cf1d211a3c94f4a202e7eecb4b65e5c12c447b61e0f8a" Oct 03 09:06:48 crc kubenswrapper[4899]: I1003 09:06:48.358307 4899 scope.go:117] "RemoveContainer" containerID="6c7947c84c25547b543e8e74b8151e5204407a9b2c25ee9645290127cee1a8fe" Oct 03 09:06:48 crc kubenswrapper[4899]: I1003 09:06:48.391465 4899 scope.go:117] "RemoveContainer" containerID="c9168264cd0eb8375208d94c953dccc869df245f6de94d6190e4a0d6241d6419" Oct 03 09:06:48 crc kubenswrapper[4899]: I1003 09:06:48.438428 4899 scope.go:117] "RemoveContainer" containerID="7e4717f08297f6e535ff8f209cebedd1559de0f9904ec33aa861120513b406e0" Oct 03 09:06:48 crc kubenswrapper[4899]: I1003 09:06:48.489527 4899 scope.go:117] "RemoveContainer" containerID="b5977bbe81aaf40a87a557eaf4e8f2f6ceb859ac2843377a8ca4d677cd269bd4" Oct 03 09:06:48 crc kubenswrapper[4899]: I1003 09:06:48.527880 4899 scope.go:117] "RemoveContainer" containerID="0b2ba05d043d16e2df9c8b68595e454897d7238e0886fb07a1990d80402ce575" Oct 03 09:06:48 crc kubenswrapper[4899]: I1003 09:06:48.556608 4899 scope.go:117] "RemoveContainer" containerID="dca5f89608b5bf5fccc25bf3d0544bcc7822101647485fe404a3c913d668c87c" Oct 03 09:06:48 crc kubenswrapper[4899]: I1003 09:06:48.575796 4899 scope.go:117] "RemoveContainer" containerID="cf3f56777ba87224ce7d6f600a81bda96f390259baa2944388ade315a271a660" Oct 03 09:06:48 crc kubenswrapper[4899]: I1003 09:06:48.593735 4899 scope.go:117] "RemoveContainer" containerID="861aaae4a6a7eb9b804be54e689589866de6ddbf6702b0e8991640380ed7a9aa" Oct 03 09:06:48 crc kubenswrapper[4899]: I1003 09:06:48.616696 4899 scope.go:117] "RemoveContainer" containerID="379fe7de065d4009245ffe65b55403c1c91dc0d22eabde5774e6bc860a1e47a3" Oct 03 09:06:56 crc kubenswrapper[4899]: I1003 09:06:56.533264 4899 scope.go:117] "RemoveContainer" containerID="7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b" Oct 03 09:06:56 crc kubenswrapper[4899]: E1003 09:06:56.535447 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:06:59 crc kubenswrapper[4899]: I1003 09:06:59.056449 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-5rpwq"] Oct 03 09:06:59 crc kubenswrapper[4899]: I1003 09:06:59.082190 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-jf5f9"] Oct 03 09:06:59 crc kubenswrapper[4899]: I1003 09:06:59.092718 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-69lhb"] Oct 03 09:06:59 crc kubenswrapper[4899]: I1003 09:06:59.101841 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-5rpwq"] Oct 03 09:06:59 crc kubenswrapper[4899]: I1003 
09:06:59.110431 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-69lhb"] Oct 03 09:06:59 crc kubenswrapper[4899]: I1003 09:06:59.142589 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-jf5f9"] Oct 03 09:07:00 crc kubenswrapper[4899]: I1003 09:07:00.537417 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="194b2a4b-c8aa-4590-a400-f54b03904ecf" path="/var/lib/kubelet/pods/194b2a4b-c8aa-4590-a400-f54b03904ecf/volumes" Oct 03 09:07:00 crc kubenswrapper[4899]: I1003 09:07:00.538311 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4663e6c0-d0e2-49f9-a457-8bc02fefa635" path="/var/lib/kubelet/pods/4663e6c0-d0e2-49f9-a457-8bc02fefa635/volumes" Oct 03 09:07:00 crc kubenswrapper[4899]: I1003 09:07:00.538870 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d0954fe-c339-493b-a2ca-5d30b54bc603" path="/var/lib/kubelet/pods/5d0954fe-c339-493b-a2ca-5d30b54bc603/volumes" Oct 03 09:07:09 crc kubenswrapper[4899]: I1003 09:07:09.527092 4899 scope.go:117] "RemoveContainer" containerID="7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b" Oct 03 09:07:09 crc kubenswrapper[4899]: E1003 09:07:09.527801 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:07:10 crc kubenswrapper[4899]: I1003 09:07:10.026267 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-q9f5f"] Oct 03 09:07:10 crc kubenswrapper[4899]: I1003 09:07:10.033666 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-q9f5f"] Oct 03 09:07:10 crc kubenswrapper[4899]: I1003 09:07:10.537360 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5545cd7a-7849-48e5-91f3-6a3a8d51e665" path="/var/lib/kubelet/pods/5545cd7a-7849-48e5-91f3-6a3a8d51e665/volumes" Oct 03 09:07:23 crc kubenswrapper[4899]: I1003 09:07:23.526977 4899 scope.go:117] "RemoveContainer" containerID="7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b" Oct 03 09:07:23 crc kubenswrapper[4899]: E1003 09:07:23.527754 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:07:35 crc kubenswrapper[4899]: I1003 09:07:35.885336 4899 generic.go:334] "Generic (PLEG): container finished" podID="fef8b1ca-fe97-49df-9d53-89edfaa3d12a" containerID="ab44d12f7da5e5cbc979ac658602ebe59d2dc05bbacf4e50c46a980ee462c62a" exitCode=0 Oct 03 09:07:35 crc kubenswrapper[4899]: I1003 09:07:35.885429 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7l699" event={"ID":"fef8b1ca-fe97-49df-9d53-89edfaa3d12a","Type":"ContainerDied","Data":"ab44d12f7da5e5cbc979ac658602ebe59d2dc05bbacf4e50c46a980ee462c62a"} Oct 03 09:07:37 crc 
kubenswrapper[4899]: I1003 09:07:37.287369 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7l699" Oct 03 09:07:37 crc kubenswrapper[4899]: I1003 09:07:37.473099 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fef8b1ca-fe97-49df-9d53-89edfaa3d12a-ssh-key\") pod \"fef8b1ca-fe97-49df-9d53-89edfaa3d12a\" (UID: \"fef8b1ca-fe97-49df-9d53-89edfaa3d12a\") " Oct 03 09:07:37 crc kubenswrapper[4899]: I1003 09:07:37.473170 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fef8b1ca-fe97-49df-9d53-89edfaa3d12a-inventory\") pod \"fef8b1ca-fe97-49df-9d53-89edfaa3d12a\" (UID: \"fef8b1ca-fe97-49df-9d53-89edfaa3d12a\") " Oct 03 09:07:37 crc kubenswrapper[4899]: I1003 09:07:37.473300 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f2mxz\" (UniqueName: \"kubernetes.io/projected/fef8b1ca-fe97-49df-9d53-89edfaa3d12a-kube-api-access-f2mxz\") pod \"fef8b1ca-fe97-49df-9d53-89edfaa3d12a\" (UID: \"fef8b1ca-fe97-49df-9d53-89edfaa3d12a\") " Oct 03 09:07:37 crc kubenswrapper[4899]: I1003 09:07:37.480757 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fef8b1ca-fe97-49df-9d53-89edfaa3d12a-kube-api-access-f2mxz" (OuterVolumeSpecName: "kube-api-access-f2mxz") pod "fef8b1ca-fe97-49df-9d53-89edfaa3d12a" (UID: "fef8b1ca-fe97-49df-9d53-89edfaa3d12a"). InnerVolumeSpecName "kube-api-access-f2mxz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:07:37 crc kubenswrapper[4899]: I1003 09:07:37.505629 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fef8b1ca-fe97-49df-9d53-89edfaa3d12a-inventory" (OuterVolumeSpecName: "inventory") pod "fef8b1ca-fe97-49df-9d53-89edfaa3d12a" (UID: "fef8b1ca-fe97-49df-9d53-89edfaa3d12a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:07:37 crc kubenswrapper[4899]: I1003 09:07:37.507454 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fef8b1ca-fe97-49df-9d53-89edfaa3d12a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fef8b1ca-fe97-49df-9d53-89edfaa3d12a" (UID: "fef8b1ca-fe97-49df-9d53-89edfaa3d12a"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:07:37 crc kubenswrapper[4899]: I1003 09:07:37.575259 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f2mxz\" (UniqueName: \"kubernetes.io/projected/fef8b1ca-fe97-49df-9d53-89edfaa3d12a-kube-api-access-f2mxz\") on node \"crc\" DevicePath \"\"" Oct 03 09:07:37 crc kubenswrapper[4899]: I1003 09:07:37.575297 4899 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fef8b1ca-fe97-49df-9d53-89edfaa3d12a-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 09:07:37 crc kubenswrapper[4899]: I1003 09:07:37.575307 4899 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fef8b1ca-fe97-49df-9d53-89edfaa3d12a-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 09:07:37 crc kubenswrapper[4899]: I1003 09:07:37.906498 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7l699" event={"ID":"fef8b1ca-fe97-49df-9d53-89edfaa3d12a","Type":"ContainerDied","Data":"9c48f66d45fcb8058f338c0edd0e43c91cb7dace945ac5ec1326d77678d07c71"} Oct 03 09:07:37 crc kubenswrapper[4899]: I1003 09:07:37.906550 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7l699" Oct 03 09:07:37 crc kubenswrapper[4899]: I1003 09:07:37.906556 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9c48f66d45fcb8058f338c0edd0e43c91cb7dace945ac5ec1326d77678d07c71" Oct 03 09:07:37 crc kubenswrapper[4899]: I1003 09:07:37.984686 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k"] Oct 03 09:07:37 crc kubenswrapper[4899]: E1003 09:07:37.985301 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fef8b1ca-fe97-49df-9d53-89edfaa3d12a" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 03 09:07:37 crc kubenswrapper[4899]: I1003 09:07:37.985323 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="fef8b1ca-fe97-49df-9d53-89edfaa3d12a" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 03 09:07:37 crc kubenswrapper[4899]: I1003 09:07:37.985642 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="fef8b1ca-fe97-49df-9d53-89edfaa3d12a" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 03 09:07:37 crc kubenswrapper[4899]: I1003 09:07:37.986526 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k" Oct 03 09:07:37 crc kubenswrapper[4899]: I1003 09:07:37.989373 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 09:07:37 crc kubenswrapper[4899]: I1003 09:07:37.989379 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 09:07:37 crc kubenswrapper[4899]: I1003 09:07:37.989571 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pnmjv" Oct 03 09:07:37 crc kubenswrapper[4899]: I1003 09:07:37.992871 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 09:07:38 crc kubenswrapper[4899]: I1003 09:07:38.000857 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k"] Oct 03 09:07:38 crc kubenswrapper[4899]: I1003 09:07:38.084712 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fea4a390-4920-4967-9e2a-152d46f212a3-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k\" (UID: \"fea4a390-4920-4967-9e2a-152d46f212a3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k" Oct 03 09:07:38 crc kubenswrapper[4899]: I1003 09:07:38.084848 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2nvtt\" (UniqueName: \"kubernetes.io/projected/fea4a390-4920-4967-9e2a-152d46f212a3-kube-api-access-2nvtt\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k\" (UID: \"fea4a390-4920-4967-9e2a-152d46f212a3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k" Oct 03 09:07:38 crc kubenswrapper[4899]: I1003 09:07:38.084997 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fea4a390-4920-4967-9e2a-152d46f212a3-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k\" (UID: \"fea4a390-4920-4967-9e2a-152d46f212a3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k" Oct 03 09:07:38 crc kubenswrapper[4899]: I1003 09:07:38.187073 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2nvtt\" (UniqueName: \"kubernetes.io/projected/fea4a390-4920-4967-9e2a-152d46f212a3-kube-api-access-2nvtt\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k\" (UID: \"fea4a390-4920-4967-9e2a-152d46f212a3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k" Oct 03 09:07:38 crc kubenswrapper[4899]: I1003 09:07:38.187166 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fea4a390-4920-4967-9e2a-152d46f212a3-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k\" (UID: \"fea4a390-4920-4967-9e2a-152d46f212a3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k" Oct 03 09:07:38 crc kubenswrapper[4899]: I1003 09:07:38.187337 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fea4a390-4920-4967-9e2a-152d46f212a3-inventory\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k\" (UID: \"fea4a390-4920-4967-9e2a-152d46f212a3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k" Oct 03 09:07:38 crc kubenswrapper[4899]: I1003 09:07:38.190585 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fea4a390-4920-4967-9e2a-152d46f212a3-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k\" (UID: \"fea4a390-4920-4967-9e2a-152d46f212a3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k" Oct 03 09:07:38 crc kubenswrapper[4899]: I1003 09:07:38.190585 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fea4a390-4920-4967-9e2a-152d46f212a3-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k\" (UID: \"fea4a390-4920-4967-9e2a-152d46f212a3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k" Oct 03 09:07:38 crc kubenswrapper[4899]: I1003 09:07:38.204179 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2nvtt\" (UniqueName: \"kubernetes.io/projected/fea4a390-4920-4967-9e2a-152d46f212a3-kube-api-access-2nvtt\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k\" (UID: \"fea4a390-4920-4967-9e2a-152d46f212a3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k" Oct 03 09:07:38 crc kubenswrapper[4899]: I1003 09:07:38.303798 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k" Oct 03 09:07:38 crc kubenswrapper[4899]: I1003 09:07:38.527161 4899 scope.go:117] "RemoveContainer" containerID="7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b" Oct 03 09:07:38 crc kubenswrapper[4899]: E1003 09:07:38.527738 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:07:38 crc kubenswrapper[4899]: I1003 09:07:38.811375 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k"] Oct 03 09:07:38 crc kubenswrapper[4899]: I1003 09:07:38.916111 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k" event={"ID":"fea4a390-4920-4967-9e2a-152d46f212a3","Type":"ContainerStarted","Data":"6b6879464ae72a9542b0536bb94217c971d1251b81b447adbb2d1e3403fa7d92"} Oct 03 09:07:39 crc kubenswrapper[4899]: I1003 09:07:39.925354 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k" event={"ID":"fea4a390-4920-4967-9e2a-152d46f212a3","Type":"ContainerStarted","Data":"217bf4f0ae44ac87cd39ab2c518d84f2a7a047636347acedade46658b27e35e6"} Oct 03 09:07:39 crc kubenswrapper[4899]: I1003 09:07:39.942187 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k" podStartSLOduration=2.474239897 podStartE2EDuration="2.942161031s" 
podCreationTimestamp="2025-10-03 09:07:37 +0000 UTC" firstStartedPulling="2025-10-03 09:07:38.816708245 +0000 UTC m=+1632.924193198" lastFinishedPulling="2025-10-03 09:07:39.284629379 +0000 UTC m=+1633.392114332" observedRunningTime="2025-10-03 09:07:39.94085153 +0000 UTC m=+1634.048336483" watchObservedRunningTime="2025-10-03 09:07:39.942161031 +0000 UTC m=+1634.049645984" Oct 03 09:07:44 crc kubenswrapper[4899]: I1003 09:07:44.971156 4899 generic.go:334] "Generic (PLEG): container finished" podID="fea4a390-4920-4967-9e2a-152d46f212a3" containerID="217bf4f0ae44ac87cd39ab2c518d84f2a7a047636347acedade46658b27e35e6" exitCode=0 Oct 03 09:07:44 crc kubenswrapper[4899]: I1003 09:07:44.971283 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k" event={"ID":"fea4a390-4920-4967-9e2a-152d46f212a3","Type":"ContainerDied","Data":"217bf4f0ae44ac87cd39ab2c518d84f2a7a047636347acedade46658b27e35e6"} Oct 03 09:07:46 crc kubenswrapper[4899]: I1003 09:07:46.337906 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k" Oct 03 09:07:46 crc kubenswrapper[4899]: I1003 09:07:46.445761 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fea4a390-4920-4967-9e2a-152d46f212a3-inventory\") pod \"fea4a390-4920-4967-9e2a-152d46f212a3\" (UID: \"fea4a390-4920-4967-9e2a-152d46f212a3\") " Oct 03 09:07:46 crc kubenswrapper[4899]: I1003 09:07:46.445963 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2nvtt\" (UniqueName: \"kubernetes.io/projected/fea4a390-4920-4967-9e2a-152d46f212a3-kube-api-access-2nvtt\") pod \"fea4a390-4920-4967-9e2a-152d46f212a3\" (UID: \"fea4a390-4920-4967-9e2a-152d46f212a3\") " Oct 03 09:07:46 crc kubenswrapper[4899]: I1003 09:07:46.446099 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fea4a390-4920-4967-9e2a-152d46f212a3-ssh-key\") pod \"fea4a390-4920-4967-9e2a-152d46f212a3\" (UID: \"fea4a390-4920-4967-9e2a-152d46f212a3\") " Oct 03 09:07:46 crc kubenswrapper[4899]: I1003 09:07:46.452095 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fea4a390-4920-4967-9e2a-152d46f212a3-kube-api-access-2nvtt" (OuterVolumeSpecName: "kube-api-access-2nvtt") pod "fea4a390-4920-4967-9e2a-152d46f212a3" (UID: "fea4a390-4920-4967-9e2a-152d46f212a3"). InnerVolumeSpecName "kube-api-access-2nvtt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:07:46 crc kubenswrapper[4899]: I1003 09:07:46.474157 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fea4a390-4920-4967-9e2a-152d46f212a3-inventory" (OuterVolumeSpecName: "inventory") pod "fea4a390-4920-4967-9e2a-152d46f212a3" (UID: "fea4a390-4920-4967-9e2a-152d46f212a3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:07:46 crc kubenswrapper[4899]: I1003 09:07:46.478285 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fea4a390-4920-4967-9e2a-152d46f212a3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fea4a390-4920-4967-9e2a-152d46f212a3" (UID: "fea4a390-4920-4967-9e2a-152d46f212a3"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:07:46 crc kubenswrapper[4899]: I1003 09:07:46.548662 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2nvtt\" (UniqueName: \"kubernetes.io/projected/fea4a390-4920-4967-9e2a-152d46f212a3-kube-api-access-2nvtt\") on node \"crc\" DevicePath \"\"" Oct 03 09:07:46 crc kubenswrapper[4899]: I1003 09:07:46.548693 4899 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fea4a390-4920-4967-9e2a-152d46f212a3-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 09:07:46 crc kubenswrapper[4899]: I1003 09:07:46.548702 4899 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fea4a390-4920-4967-9e2a-152d46f212a3-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 09:07:46 crc kubenswrapper[4899]: I1003 09:07:46.989325 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k" event={"ID":"fea4a390-4920-4967-9e2a-152d46f212a3","Type":"ContainerDied","Data":"6b6879464ae72a9542b0536bb94217c971d1251b81b447adbb2d1e3403fa7d92"} Oct 03 09:07:46 crc kubenswrapper[4899]: I1003 09:07:46.989363 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b6879464ae72a9542b0536bb94217c971d1251b81b447adbb2d1e3403fa7d92" Oct 03 09:07:46 crc kubenswrapper[4899]: I1003 09:07:46.989381 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k" Oct 03 09:07:47 crc kubenswrapper[4899]: I1003 09:07:47.069239 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-bxmbr"] Oct 03 09:07:47 crc kubenswrapper[4899]: E1003 09:07:47.069774 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fea4a390-4920-4967-9e2a-152d46f212a3" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 03 09:07:47 crc kubenswrapper[4899]: I1003 09:07:47.069794 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="fea4a390-4920-4967-9e2a-152d46f212a3" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 03 09:07:47 crc kubenswrapper[4899]: I1003 09:07:47.070070 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="fea4a390-4920-4967-9e2a-152d46f212a3" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 03 09:07:47 crc kubenswrapper[4899]: I1003 09:07:47.070980 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bxmbr" Oct 03 09:07:47 crc kubenswrapper[4899]: I1003 09:07:47.073723 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 09:07:47 crc kubenswrapper[4899]: I1003 09:07:47.074113 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pnmjv" Oct 03 09:07:47 crc kubenswrapper[4899]: I1003 09:07:47.074259 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 09:07:47 crc kubenswrapper[4899]: I1003 09:07:47.074278 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 09:07:47 crc kubenswrapper[4899]: I1003 09:07:47.078728 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-bxmbr"] Oct 03 09:07:47 crc kubenswrapper[4899]: I1003 09:07:47.261714 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzckm\" (UniqueName: \"kubernetes.io/projected/6a1cd0af-23db-4234-b97c-e57852eaa634-kube-api-access-gzckm\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-bxmbr\" (UID: \"6a1cd0af-23db-4234-b97c-e57852eaa634\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bxmbr" Oct 03 09:07:47 crc kubenswrapper[4899]: I1003 09:07:47.262763 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6a1cd0af-23db-4234-b97c-e57852eaa634-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-bxmbr\" (UID: \"6a1cd0af-23db-4234-b97c-e57852eaa634\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bxmbr" Oct 03 09:07:47 crc kubenswrapper[4899]: I1003 09:07:47.262794 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6a1cd0af-23db-4234-b97c-e57852eaa634-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-bxmbr\" (UID: \"6a1cd0af-23db-4234-b97c-e57852eaa634\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bxmbr" Oct 03 09:07:47 crc kubenswrapper[4899]: I1003 09:07:47.364938 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzckm\" (UniqueName: \"kubernetes.io/projected/6a1cd0af-23db-4234-b97c-e57852eaa634-kube-api-access-gzckm\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-bxmbr\" (UID: \"6a1cd0af-23db-4234-b97c-e57852eaa634\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bxmbr" Oct 03 09:07:47 crc kubenswrapper[4899]: I1003 09:07:47.365006 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6a1cd0af-23db-4234-b97c-e57852eaa634-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-bxmbr\" (UID: \"6a1cd0af-23db-4234-b97c-e57852eaa634\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bxmbr" Oct 03 09:07:47 crc kubenswrapper[4899]: I1003 09:07:47.365035 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6a1cd0af-23db-4234-b97c-e57852eaa634-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-bxmbr\" (UID: 
\"6a1cd0af-23db-4234-b97c-e57852eaa634\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bxmbr" Oct 03 09:07:47 crc kubenswrapper[4899]: I1003 09:07:47.371636 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6a1cd0af-23db-4234-b97c-e57852eaa634-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-bxmbr\" (UID: \"6a1cd0af-23db-4234-b97c-e57852eaa634\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bxmbr" Oct 03 09:07:47 crc kubenswrapper[4899]: I1003 09:07:47.371877 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6a1cd0af-23db-4234-b97c-e57852eaa634-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-bxmbr\" (UID: \"6a1cd0af-23db-4234-b97c-e57852eaa634\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bxmbr" Oct 03 09:07:47 crc kubenswrapper[4899]: I1003 09:07:47.381706 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzckm\" (UniqueName: \"kubernetes.io/projected/6a1cd0af-23db-4234-b97c-e57852eaa634-kube-api-access-gzckm\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-bxmbr\" (UID: \"6a1cd0af-23db-4234-b97c-e57852eaa634\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bxmbr" Oct 03 09:07:47 crc kubenswrapper[4899]: I1003 09:07:47.402120 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bxmbr" Oct 03 09:07:47 crc kubenswrapper[4899]: I1003 09:07:47.929828 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-bxmbr"] Oct 03 09:07:48 crc kubenswrapper[4899]: I1003 09:07:48.000816 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bxmbr" event={"ID":"6a1cd0af-23db-4234-b97c-e57852eaa634","Type":"ContainerStarted","Data":"94393d11f9b0f1764b4784add354aff89a8b624fd4f5a37e6ab4a21cf6a534f8"} Oct 03 09:07:48 crc kubenswrapper[4899]: I1003 09:07:48.801462 4899 scope.go:117] "RemoveContainer" containerID="468a832bcbae0a299dcc63dd7c633e686d058e1925d0a6128323007644557f3f" Oct 03 09:07:48 crc kubenswrapper[4899]: I1003 09:07:48.859606 4899 scope.go:117] "RemoveContainer" containerID="4e06dd4031a4a05ffb46e1fd96867356a33a50e1fea760358ba333e12f578a99" Oct 03 09:07:48 crc kubenswrapper[4899]: I1003 09:07:48.917204 4899 scope.go:117] "RemoveContainer" containerID="0884fff84a52ad2b22541251c235b6dd4fd12d11c778d61d51d5c2e38bf2da94" Oct 03 09:07:48 crc kubenswrapper[4899]: I1003 09:07:48.954070 4899 scope.go:117] "RemoveContainer" containerID="73484b2bfe6b3bff6a4cb1f73e2971c4f032021f044f264469f6546e5686573b" Oct 03 09:07:50 crc kubenswrapper[4899]: I1003 09:07:50.030320 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bxmbr" event={"ID":"6a1cd0af-23db-4234-b97c-e57852eaa634","Type":"ContainerStarted","Data":"09a1f5817722519794b51bcde13638c3a20d3aa656faf1035b8a4b1a6ed00301"} Oct 03 09:07:50 crc kubenswrapper[4899]: I1003 09:07:50.043736 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bxmbr" podStartSLOduration=2.061361772 podStartE2EDuration="3.043710637s" podCreationTimestamp="2025-10-03 09:07:47 +0000 UTC" firstStartedPulling="2025-10-03 09:07:47.935093961 
+0000 UTC m=+1642.042578914" lastFinishedPulling="2025-10-03 09:07:48.917442826 +0000 UTC m=+1643.024927779" observedRunningTime="2025-10-03 09:07:50.042358865 +0000 UTC m=+1644.149843818" watchObservedRunningTime="2025-10-03 09:07:50.043710637 +0000 UTC m=+1644.151195590" Oct 03 09:07:51 crc kubenswrapper[4899]: I1003 09:07:51.035517 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-6mwps"] Oct 03 09:07:51 crc kubenswrapper[4899]: I1003 09:07:51.043853 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-9d7r2"] Oct 03 09:07:51 crc kubenswrapper[4899]: I1003 09:07:51.054460 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-cxdjt"] Oct 03 09:07:51 crc kubenswrapper[4899]: I1003 09:07:51.064497 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-9d7r2"] Oct 03 09:07:51 crc kubenswrapper[4899]: I1003 09:07:51.072853 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-cxdjt"] Oct 03 09:07:51 crc kubenswrapper[4899]: I1003 09:07:51.079824 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-6mwps"] Oct 03 09:07:52 crc kubenswrapper[4899]: I1003 09:07:52.527853 4899 scope.go:117] "RemoveContainer" containerID="7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b" Oct 03 09:07:52 crc kubenswrapper[4899]: E1003 09:07:52.528331 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:07:52 crc kubenswrapper[4899]: I1003 09:07:52.539122 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e579b0b-7373-4ab5-b543-3b58fa367f1a" path="/var/lib/kubelet/pods/1e579b0b-7373-4ab5-b543-3b58fa367f1a/volumes" Oct 03 09:07:52 crc kubenswrapper[4899]: I1003 09:07:52.539627 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="babb4e0d-c482-4cb4-8140-4bfc34ab2afc" path="/var/lib/kubelet/pods/babb4e0d-c482-4cb4-8140-4bfc34ab2afc/volumes" Oct 03 09:07:52 crc kubenswrapper[4899]: I1003 09:07:52.540133 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d14aaa2d-9ea7-4886-8464-effc840164f4" path="/var/lib/kubelet/pods/d14aaa2d-9ea7-4886-8464-effc840164f4/volumes" Oct 03 09:08:05 crc kubenswrapper[4899]: I1003 09:08:05.028394 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-994c-account-create-6qslq"] Oct 03 09:08:05 crc kubenswrapper[4899]: I1003 09:08:05.037288 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-994c-account-create-6qslq"] Oct 03 09:08:05 crc kubenswrapper[4899]: I1003 09:08:05.045931 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-1337-account-create-6dt5c"] Oct 03 09:08:05 crc kubenswrapper[4899]: I1003 09:08:05.052981 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-1337-account-create-6dt5c"] Oct 03 09:08:05 crc kubenswrapper[4899]: I1003 09:08:05.527264 4899 scope.go:117] "RemoveContainer" containerID="7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b" Oct 03 09:08:05 crc 
kubenswrapper[4899]: E1003 09:08:05.527788 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:08:06 crc kubenswrapper[4899]: I1003 09:08:06.024315 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-4370-account-create-zqsn8"] Oct 03 09:08:06 crc kubenswrapper[4899]: I1003 09:08:06.032099 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-4370-account-create-zqsn8"] Oct 03 09:08:06 crc kubenswrapper[4899]: I1003 09:08:06.538831 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bd3bb54-4225-4123-b1e2-05bf17011397" path="/var/lib/kubelet/pods/7bd3bb54-4225-4123-b1e2-05bf17011397/volumes" Oct 03 09:08:06 crc kubenswrapper[4899]: I1003 09:08:06.540008 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85a158ee-9e94-4664-b795-9fffb1fd2674" path="/var/lib/kubelet/pods/85a158ee-9e94-4664-b795-9fffb1fd2674/volumes" Oct 03 09:08:06 crc kubenswrapper[4899]: I1003 09:08:06.540582 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3f0c5eb-8c4d-42bf-9cb2-5ed03cfa5784" path="/var/lib/kubelet/pods/e3f0c5eb-8c4d-42bf-9cb2-5ed03cfa5784/volumes" Oct 03 09:08:19 crc kubenswrapper[4899]: I1003 09:08:19.526922 4899 scope.go:117] "RemoveContainer" containerID="7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b" Oct 03 09:08:19 crc kubenswrapper[4899]: E1003 09:08:19.527656 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:08:24 crc kubenswrapper[4899]: I1003 09:08:24.324876 4899 generic.go:334] "Generic (PLEG): container finished" podID="6a1cd0af-23db-4234-b97c-e57852eaa634" containerID="09a1f5817722519794b51bcde13638c3a20d3aa656faf1035b8a4b1a6ed00301" exitCode=0 Oct 03 09:08:24 crc kubenswrapper[4899]: I1003 09:08:24.324982 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bxmbr" event={"ID":"6a1cd0af-23db-4234-b97c-e57852eaa634","Type":"ContainerDied","Data":"09a1f5817722519794b51bcde13638c3a20d3aa656faf1035b8a4b1a6ed00301"} Oct 03 09:08:25 crc kubenswrapper[4899]: I1003 09:08:25.818714 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bxmbr" Oct 03 09:08:25 crc kubenswrapper[4899]: I1003 09:08:25.987988 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gzckm\" (UniqueName: \"kubernetes.io/projected/6a1cd0af-23db-4234-b97c-e57852eaa634-kube-api-access-gzckm\") pod \"6a1cd0af-23db-4234-b97c-e57852eaa634\" (UID: \"6a1cd0af-23db-4234-b97c-e57852eaa634\") " Oct 03 09:08:25 crc kubenswrapper[4899]: I1003 09:08:25.988031 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6a1cd0af-23db-4234-b97c-e57852eaa634-ssh-key\") pod \"6a1cd0af-23db-4234-b97c-e57852eaa634\" (UID: \"6a1cd0af-23db-4234-b97c-e57852eaa634\") " Oct 03 09:08:25 crc kubenswrapper[4899]: I1003 09:08:25.988168 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6a1cd0af-23db-4234-b97c-e57852eaa634-inventory\") pod \"6a1cd0af-23db-4234-b97c-e57852eaa634\" (UID: \"6a1cd0af-23db-4234-b97c-e57852eaa634\") " Oct 03 09:08:25 crc kubenswrapper[4899]: I1003 09:08:25.993319 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a1cd0af-23db-4234-b97c-e57852eaa634-kube-api-access-gzckm" (OuterVolumeSpecName: "kube-api-access-gzckm") pod "6a1cd0af-23db-4234-b97c-e57852eaa634" (UID: "6a1cd0af-23db-4234-b97c-e57852eaa634"). InnerVolumeSpecName "kube-api-access-gzckm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:08:26 crc kubenswrapper[4899]: I1003 09:08:26.014342 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a1cd0af-23db-4234-b97c-e57852eaa634-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6a1cd0af-23db-4234-b97c-e57852eaa634" (UID: "6a1cd0af-23db-4234-b97c-e57852eaa634"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:08:26 crc kubenswrapper[4899]: I1003 09:08:26.016281 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a1cd0af-23db-4234-b97c-e57852eaa634-inventory" (OuterVolumeSpecName: "inventory") pod "6a1cd0af-23db-4234-b97c-e57852eaa634" (UID: "6a1cd0af-23db-4234-b97c-e57852eaa634"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:08:26 crc kubenswrapper[4899]: I1003 09:08:26.091024 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gzckm\" (UniqueName: \"kubernetes.io/projected/6a1cd0af-23db-4234-b97c-e57852eaa634-kube-api-access-gzckm\") on node \"crc\" DevicePath \"\"" Oct 03 09:08:26 crc kubenswrapper[4899]: I1003 09:08:26.091072 4899 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6a1cd0af-23db-4234-b97c-e57852eaa634-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 09:08:26 crc kubenswrapper[4899]: I1003 09:08:26.091083 4899 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6a1cd0af-23db-4234-b97c-e57852eaa634-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 09:08:26 crc kubenswrapper[4899]: I1003 09:08:26.342354 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bxmbr" event={"ID":"6a1cd0af-23db-4234-b97c-e57852eaa634","Type":"ContainerDied","Data":"94393d11f9b0f1764b4784add354aff89a8b624fd4f5a37e6ab4a21cf6a534f8"} Oct 03 09:08:26 crc kubenswrapper[4899]: I1003 09:08:26.342398 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="94393d11f9b0f1764b4784add354aff89a8b624fd4f5a37e6ab4a21cf6a534f8" Oct 03 09:08:26 crc kubenswrapper[4899]: I1003 09:08:26.342446 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bxmbr" Oct 03 09:08:26 crc kubenswrapper[4899]: I1003 09:08:26.487352 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rr46f"] Oct 03 09:08:26 crc kubenswrapper[4899]: E1003 09:08:26.487918 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a1cd0af-23db-4234-b97c-e57852eaa634" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 03 09:08:26 crc kubenswrapper[4899]: I1003 09:08:26.487943 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a1cd0af-23db-4234-b97c-e57852eaa634" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 03 09:08:26 crc kubenswrapper[4899]: I1003 09:08:26.488251 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a1cd0af-23db-4234-b97c-e57852eaa634" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 03 09:08:26 crc kubenswrapper[4899]: I1003 09:08:26.489148 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rr46f" Oct 03 09:08:26 crc kubenswrapper[4899]: I1003 09:08:26.491690 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 09:08:26 crc kubenswrapper[4899]: I1003 09:08:26.491940 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pnmjv" Oct 03 09:08:26 crc kubenswrapper[4899]: I1003 09:08:26.492163 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 09:08:26 crc kubenswrapper[4899]: I1003 09:08:26.498092 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 09:08:26 crc kubenswrapper[4899]: I1003 09:08:26.498923 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rr46f"] Oct 03 09:08:26 crc kubenswrapper[4899]: I1003 09:08:26.601457 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fc6f7423-a7b0-4bd0-ac84-f65eb45233b3-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rr46f\" (UID: \"fc6f7423-a7b0-4bd0-ac84-f65eb45233b3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rr46f" Oct 03 09:08:26 crc kubenswrapper[4899]: I1003 09:08:26.601574 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njzbc\" (UniqueName: \"kubernetes.io/projected/fc6f7423-a7b0-4bd0-ac84-f65eb45233b3-kube-api-access-njzbc\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rr46f\" (UID: \"fc6f7423-a7b0-4bd0-ac84-f65eb45233b3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rr46f" Oct 03 09:08:26 crc kubenswrapper[4899]: I1003 09:08:26.601609 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fc6f7423-a7b0-4bd0-ac84-f65eb45233b3-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rr46f\" (UID: \"fc6f7423-a7b0-4bd0-ac84-f65eb45233b3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rr46f" Oct 03 09:08:26 crc kubenswrapper[4899]: I1003 09:08:26.704198 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fc6f7423-a7b0-4bd0-ac84-f65eb45233b3-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rr46f\" (UID: \"fc6f7423-a7b0-4bd0-ac84-f65eb45233b3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rr46f" Oct 03 09:08:26 crc kubenswrapper[4899]: I1003 09:08:26.704305 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njzbc\" (UniqueName: \"kubernetes.io/projected/fc6f7423-a7b0-4bd0-ac84-f65eb45233b3-kube-api-access-njzbc\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rr46f\" (UID: \"fc6f7423-a7b0-4bd0-ac84-f65eb45233b3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rr46f" Oct 03 09:08:26 crc kubenswrapper[4899]: I1003 09:08:26.704336 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fc6f7423-a7b0-4bd0-ac84-f65eb45233b3-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rr46f\" 
(UID: \"fc6f7423-a7b0-4bd0-ac84-f65eb45233b3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rr46f" Oct 03 09:08:26 crc kubenswrapper[4899]: I1003 09:08:26.708030 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fc6f7423-a7b0-4bd0-ac84-f65eb45233b3-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rr46f\" (UID: \"fc6f7423-a7b0-4bd0-ac84-f65eb45233b3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rr46f" Oct 03 09:08:26 crc kubenswrapper[4899]: I1003 09:08:26.709138 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fc6f7423-a7b0-4bd0-ac84-f65eb45233b3-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rr46f\" (UID: \"fc6f7423-a7b0-4bd0-ac84-f65eb45233b3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rr46f" Oct 03 09:08:26 crc kubenswrapper[4899]: I1003 09:08:26.721315 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njzbc\" (UniqueName: \"kubernetes.io/projected/fc6f7423-a7b0-4bd0-ac84-f65eb45233b3-kube-api-access-njzbc\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rr46f\" (UID: \"fc6f7423-a7b0-4bd0-ac84-f65eb45233b3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rr46f" Oct 03 09:08:26 crc kubenswrapper[4899]: I1003 09:08:26.806116 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rr46f" Oct 03 09:08:27 crc kubenswrapper[4899]: I1003 09:08:27.048615 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-bvzwv"] Oct 03 09:08:27 crc kubenswrapper[4899]: I1003 09:08:27.061412 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-bvzwv"] Oct 03 09:08:27 crc kubenswrapper[4899]: I1003 09:08:27.316481 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rr46f"] Oct 03 09:08:27 crc kubenswrapper[4899]: I1003 09:08:27.358764 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rr46f" event={"ID":"fc6f7423-a7b0-4bd0-ac84-f65eb45233b3","Type":"ContainerStarted","Data":"f40a7ba639acf4d698b04b1e67eb87fc98d996261a8f747ec1dfb7362e0f1cc7"} Oct 03 09:08:28 crc kubenswrapper[4899]: I1003 09:08:28.368635 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rr46f" event={"ID":"fc6f7423-a7b0-4bd0-ac84-f65eb45233b3","Type":"ContainerStarted","Data":"c5cf34a61c25f5e13e2693fdfafcd2d8a643cf61dd5673c95ed1e889623ec919"} Oct 03 09:08:28 crc kubenswrapper[4899]: I1003 09:08:28.538308 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52b7bde3-1637-40ef-8b47-f918b97a958d" path="/var/lib/kubelet/pods/52b7bde3-1637-40ef-8b47-f918b97a958d/volumes" Oct 03 09:08:34 crc kubenswrapper[4899]: I1003 09:08:34.527318 4899 scope.go:117] "RemoveContainer" containerID="7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b" Oct 03 09:08:34 crc kubenswrapper[4899]: E1003 09:08:34.528144 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:08:48 crc kubenswrapper[4899]: I1003 09:08:48.526818 4899 scope.go:117] "RemoveContainer" containerID="7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b" Oct 03 09:08:48 crc kubenswrapper[4899]: E1003 09:08:48.527669 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:08:49 crc kubenswrapper[4899]: I1003 09:08:49.037757 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rr46f" podStartSLOduration=22.643979051 podStartE2EDuration="23.037739497s" podCreationTimestamp="2025-10-03 09:08:26 +0000 UTC" firstStartedPulling="2025-10-03 09:08:27.321202123 +0000 UTC m=+1681.428687076" lastFinishedPulling="2025-10-03 09:08:27.714962569 +0000 UTC m=+1681.822447522" observedRunningTime="2025-10-03 09:08:28.38735822 +0000 UTC m=+1682.494843193" watchObservedRunningTime="2025-10-03 09:08:49.037739497 +0000 UTC m=+1703.145224450" Oct 03 09:08:49 crc kubenswrapper[4899]: I1003 09:08:49.044488 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-9sndc"] Oct 03 09:08:49 crc kubenswrapper[4899]: I1003 09:08:49.052812 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-q2tr6"] Oct 03 09:08:49 crc kubenswrapper[4899]: I1003 09:08:49.060249 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-9sndc"] Oct 03 09:08:49 crc kubenswrapper[4899]: I1003 09:08:49.070024 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-q2tr6"] Oct 03 09:08:49 crc kubenswrapper[4899]: I1003 09:08:49.136865 4899 scope.go:117] "RemoveContainer" containerID="a91319d94bb8ccc1f351de2325d4f9530edc814ec3530255ed286e6449e5aae7" Oct 03 09:08:49 crc kubenswrapper[4899]: I1003 09:08:49.158702 4899 scope.go:117] "RemoveContainer" containerID="a4476fbf54ecddbb11f66691aefd0fb12c79aa5f593225e5bc908a3e1fe6ef5a" Oct 03 09:08:49 crc kubenswrapper[4899]: I1003 09:08:49.205832 4899 scope.go:117] "RemoveContainer" containerID="a2c7047cd3233b03f2538a69a9a22242e27ea5ba9071c94e787d7e56d64d942f" Oct 03 09:08:49 crc kubenswrapper[4899]: I1003 09:08:49.258356 4899 scope.go:117] "RemoveContainer" containerID="41c347ba33df1ded9bfc575bc426f2f75d559d4b6655452d8ae4a029bccdd8d0" Oct 03 09:08:49 crc kubenswrapper[4899]: I1003 09:08:49.302779 4899 scope.go:117] "RemoveContainer" containerID="df171ed54292357689c64ee4e5a2db694b9aff4216b1e0b2186b98ed5fd833c9" Oct 03 09:08:49 crc kubenswrapper[4899]: I1003 09:08:49.345739 4899 scope.go:117] "RemoveContainer" containerID="eb136b32f264090663bb52dfb6d93da62eba3d29aa93f368ca6883509671f4ce" Oct 03 09:08:49 crc kubenswrapper[4899]: I1003 09:08:49.410622 4899 scope.go:117] "RemoveContainer" containerID="b28d0c718bf862515c9c5ab48a1ff617b5e50c09cc4795bdedbe0c04b1182fe9" Oct 03 09:08:50 crc kubenswrapper[4899]: I1003 09:08:50.540403 4899 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad3fd26b-3f3c-4c4e-be50-ce49315719f2" path="/var/lib/kubelet/pods/ad3fd26b-3f3c-4c4e-be50-ce49315719f2/volumes" Oct 03 09:08:50 crc kubenswrapper[4899]: I1003 09:08:50.541047 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e050af65-bad7-412a-bb6f-7e7bb65573a4" path="/var/lib/kubelet/pods/e050af65-bad7-412a-bb6f-7e7bb65573a4/volumes" Oct 03 09:09:01 crc kubenswrapper[4899]: I1003 09:09:01.527367 4899 scope.go:117] "RemoveContainer" containerID="7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b" Oct 03 09:09:01 crc kubenswrapper[4899]: E1003 09:09:01.528211 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:09:16 crc kubenswrapper[4899]: I1003 09:09:16.533763 4899 scope.go:117] "RemoveContainer" containerID="7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b" Oct 03 09:09:16 crc kubenswrapper[4899]: E1003 09:09:16.534521 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:09:20 crc kubenswrapper[4899]: I1003 09:09:20.796047 4899 generic.go:334] "Generic (PLEG): container finished" podID="fc6f7423-a7b0-4bd0-ac84-f65eb45233b3" containerID="c5cf34a61c25f5e13e2693fdfafcd2d8a643cf61dd5673c95ed1e889623ec919" exitCode=2 Oct 03 09:09:20 crc kubenswrapper[4899]: I1003 09:09:20.796134 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rr46f" event={"ID":"fc6f7423-a7b0-4bd0-ac84-f65eb45233b3","Type":"ContainerDied","Data":"c5cf34a61c25f5e13e2693fdfafcd2d8a643cf61dd5673c95ed1e889623ec919"} Oct 03 09:09:22 crc kubenswrapper[4899]: I1003 09:09:22.211184 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rr46f" Oct 03 09:09:22 crc kubenswrapper[4899]: I1003 09:09:22.264087 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fc6f7423-a7b0-4bd0-ac84-f65eb45233b3-inventory\") pod \"fc6f7423-a7b0-4bd0-ac84-f65eb45233b3\" (UID: \"fc6f7423-a7b0-4bd0-ac84-f65eb45233b3\") " Oct 03 09:09:22 crc kubenswrapper[4899]: I1003 09:09:22.264286 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fc6f7423-a7b0-4bd0-ac84-f65eb45233b3-ssh-key\") pod \"fc6f7423-a7b0-4bd0-ac84-f65eb45233b3\" (UID: \"fc6f7423-a7b0-4bd0-ac84-f65eb45233b3\") " Oct 03 09:09:22 crc kubenswrapper[4899]: I1003 09:09:22.264336 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-njzbc\" (UniqueName: \"kubernetes.io/projected/fc6f7423-a7b0-4bd0-ac84-f65eb45233b3-kube-api-access-njzbc\") pod \"fc6f7423-a7b0-4bd0-ac84-f65eb45233b3\" (UID: \"fc6f7423-a7b0-4bd0-ac84-f65eb45233b3\") " Oct 03 09:09:22 crc kubenswrapper[4899]: I1003 09:09:22.269880 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc6f7423-a7b0-4bd0-ac84-f65eb45233b3-kube-api-access-njzbc" (OuterVolumeSpecName: "kube-api-access-njzbc") pod "fc6f7423-a7b0-4bd0-ac84-f65eb45233b3" (UID: "fc6f7423-a7b0-4bd0-ac84-f65eb45233b3"). InnerVolumeSpecName "kube-api-access-njzbc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:09:22 crc kubenswrapper[4899]: I1003 09:09:22.292821 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc6f7423-a7b0-4bd0-ac84-f65eb45233b3-inventory" (OuterVolumeSpecName: "inventory") pod "fc6f7423-a7b0-4bd0-ac84-f65eb45233b3" (UID: "fc6f7423-a7b0-4bd0-ac84-f65eb45233b3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:09:22 crc kubenswrapper[4899]: I1003 09:09:22.295747 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc6f7423-a7b0-4bd0-ac84-f65eb45233b3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fc6f7423-a7b0-4bd0-ac84-f65eb45233b3" (UID: "fc6f7423-a7b0-4bd0-ac84-f65eb45233b3"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:09:22 crc kubenswrapper[4899]: I1003 09:09:22.366752 4899 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fc6f7423-a7b0-4bd0-ac84-f65eb45233b3-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 09:09:22 crc kubenswrapper[4899]: I1003 09:09:22.366791 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-njzbc\" (UniqueName: \"kubernetes.io/projected/fc6f7423-a7b0-4bd0-ac84-f65eb45233b3-kube-api-access-njzbc\") on node \"crc\" DevicePath \"\"" Oct 03 09:09:22 crc kubenswrapper[4899]: I1003 09:09:22.366802 4899 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fc6f7423-a7b0-4bd0-ac84-f65eb45233b3-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 09:09:22 crc kubenswrapper[4899]: E1003 09:09:22.660866 4899 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc6f7423_a7b0_4bd0_ac84_f65eb45233b3.slice\": RecentStats: unable to find data in memory cache]" Oct 03 09:09:22 crc kubenswrapper[4899]: I1003 09:09:22.831521 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rr46f" event={"ID":"fc6f7423-a7b0-4bd0-ac84-f65eb45233b3","Type":"ContainerDied","Data":"f40a7ba639acf4d698b04b1e67eb87fc98d996261a8f747ec1dfb7362e0f1cc7"} Oct 03 09:09:22 crc kubenswrapper[4899]: I1003 09:09:22.831565 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f40a7ba639acf4d698b04b1e67eb87fc98d996261a8f747ec1dfb7362e0f1cc7" Oct 03 09:09:22 crc kubenswrapper[4899]: I1003 09:09:22.831602 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rr46f" Oct 03 09:09:29 crc kubenswrapper[4899]: I1003 09:09:29.527323 4899 scope.go:117] "RemoveContainer" containerID="7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b" Oct 03 09:09:29 crc kubenswrapper[4899]: E1003 09:09:29.528217 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:09:30 crc kubenswrapper[4899]: I1003 09:09:30.039220 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-djftw"] Oct 03 09:09:30 crc kubenswrapper[4899]: E1003 09:09:30.039622 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc6f7423-a7b0-4bd0-ac84-f65eb45233b3" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 03 09:09:30 crc kubenswrapper[4899]: I1003 09:09:30.039640 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc6f7423-a7b0-4bd0-ac84-f65eb45233b3" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 03 09:09:30 crc kubenswrapper[4899]: I1003 09:09:30.039842 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc6f7423-a7b0-4bd0-ac84-f65eb45233b3" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 03 09:09:30 crc kubenswrapper[4899]: I1003 09:09:30.040528 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-djftw" Oct 03 09:09:30 crc kubenswrapper[4899]: I1003 09:09:30.044018 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 09:09:30 crc kubenswrapper[4899]: I1003 09:09:30.044051 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 09:09:30 crc kubenswrapper[4899]: I1003 09:09:30.044390 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 09:09:30 crc kubenswrapper[4899]: I1003 09:09:30.046053 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pnmjv" Oct 03 09:09:30 crc kubenswrapper[4899]: I1003 09:09:30.100241 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-djftw"] Oct 03 09:09:30 crc kubenswrapper[4899]: I1003 09:09:30.118589 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlxbl\" (UniqueName: \"kubernetes.io/projected/51abfa86-5d01-4b3e-aceb-155ded93aa49-kube-api-access-rlxbl\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-djftw\" (UID: \"51abfa86-5d01-4b3e-aceb-155ded93aa49\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-djftw" Oct 03 09:09:30 crc kubenswrapper[4899]: I1003 09:09:30.118672 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51abfa86-5d01-4b3e-aceb-155ded93aa49-inventory\") pod 
\"configure-os-edpm-deployment-openstack-edpm-ipam-djftw\" (UID: \"51abfa86-5d01-4b3e-aceb-155ded93aa49\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-djftw" Oct 03 09:09:30 crc kubenswrapper[4899]: I1003 09:09:30.118802 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/51abfa86-5d01-4b3e-aceb-155ded93aa49-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-djftw\" (UID: \"51abfa86-5d01-4b3e-aceb-155ded93aa49\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-djftw" Oct 03 09:09:30 crc kubenswrapper[4899]: I1003 09:09:30.220253 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/51abfa86-5d01-4b3e-aceb-155ded93aa49-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-djftw\" (UID: \"51abfa86-5d01-4b3e-aceb-155ded93aa49\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-djftw" Oct 03 09:09:30 crc kubenswrapper[4899]: I1003 09:09:30.220364 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlxbl\" (UniqueName: \"kubernetes.io/projected/51abfa86-5d01-4b3e-aceb-155ded93aa49-kube-api-access-rlxbl\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-djftw\" (UID: \"51abfa86-5d01-4b3e-aceb-155ded93aa49\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-djftw" Oct 03 09:09:30 crc kubenswrapper[4899]: I1003 09:09:30.220415 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51abfa86-5d01-4b3e-aceb-155ded93aa49-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-djftw\" (UID: \"51abfa86-5d01-4b3e-aceb-155ded93aa49\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-djftw" Oct 03 09:09:30 crc kubenswrapper[4899]: I1003 09:09:30.227169 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/51abfa86-5d01-4b3e-aceb-155ded93aa49-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-djftw\" (UID: \"51abfa86-5d01-4b3e-aceb-155ded93aa49\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-djftw" Oct 03 09:09:30 crc kubenswrapper[4899]: I1003 09:09:30.227391 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51abfa86-5d01-4b3e-aceb-155ded93aa49-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-djftw\" (UID: \"51abfa86-5d01-4b3e-aceb-155ded93aa49\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-djftw" Oct 03 09:09:30 crc kubenswrapper[4899]: I1003 09:09:30.238309 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlxbl\" (UniqueName: \"kubernetes.io/projected/51abfa86-5d01-4b3e-aceb-155ded93aa49-kube-api-access-rlxbl\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-djftw\" (UID: \"51abfa86-5d01-4b3e-aceb-155ded93aa49\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-djftw" Oct 03 09:09:30 crc kubenswrapper[4899]: I1003 09:09:30.362632 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-djftw" Oct 03 09:09:30 crc kubenswrapper[4899]: I1003 09:09:30.871700 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-djftw"] Oct 03 09:09:30 crc kubenswrapper[4899]: W1003 09:09:30.876430 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod51abfa86_5d01_4b3e_aceb_155ded93aa49.slice/crio-70f655e99751252f84de62198aa3bef4d1fbc4cb85e823fd0e2a2781aa2b6dec WatchSource:0}: Error finding container 70f655e99751252f84de62198aa3bef4d1fbc4cb85e823fd0e2a2781aa2b6dec: Status 404 returned error can't find the container with id 70f655e99751252f84de62198aa3bef4d1fbc4cb85e823fd0e2a2781aa2b6dec Oct 03 09:09:30 crc kubenswrapper[4899]: I1003 09:09:30.896438 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-djftw" event={"ID":"51abfa86-5d01-4b3e-aceb-155ded93aa49","Type":"ContainerStarted","Data":"70f655e99751252f84de62198aa3bef4d1fbc4cb85e823fd0e2a2781aa2b6dec"} Oct 03 09:09:31 crc kubenswrapper[4899]: I1003 09:09:31.908867 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-djftw" event={"ID":"51abfa86-5d01-4b3e-aceb-155ded93aa49","Type":"ContainerStarted","Data":"25df7d07fe4046df8537eefa6c40d23dd82f1ba560de1b2eecd73c6cd9d21c52"} Oct 03 09:09:31 crc kubenswrapper[4899]: I1003 09:09:31.925827 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-djftw" podStartSLOduration=1.2967475529999999 podStartE2EDuration="1.925803158s" podCreationTimestamp="2025-10-03 09:09:30 +0000 UTC" firstStartedPulling="2025-10-03 09:09:30.878775675 +0000 UTC m=+1744.986260628" lastFinishedPulling="2025-10-03 09:09:31.50783128 +0000 UTC m=+1745.615316233" observedRunningTime="2025-10-03 09:09:31.924397425 +0000 UTC m=+1746.031882378" watchObservedRunningTime="2025-10-03 09:09:31.925803158 +0000 UTC m=+1746.033288121" Oct 03 09:09:34 crc kubenswrapper[4899]: I1003 09:09:34.064767 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-2gxgr"] Oct 03 09:09:34 crc kubenswrapper[4899]: I1003 09:09:34.072830 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-2gxgr"] Oct 03 09:09:34 crc kubenswrapper[4899]: I1003 09:09:34.539699 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a51e9ebc-665e-4ee1-bbef-935ff3835fbd" path="/var/lib/kubelet/pods/a51e9ebc-665e-4ee1-bbef-935ff3835fbd/volumes" Oct 03 09:09:41 crc kubenswrapper[4899]: I1003 09:09:41.528338 4899 scope.go:117] "RemoveContainer" containerID="7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b" Oct 03 09:09:41 crc kubenswrapper[4899]: E1003 09:09:41.529066 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:09:49 crc kubenswrapper[4899]: I1003 09:09:49.589408 4899 scope.go:117] "RemoveContainer" 
containerID="36e1f06c9f9ffb9e47277f3597b16a8d28da466b9ce08288d1f4951c2d705b1b" Oct 03 09:09:49 crc kubenswrapper[4899]: I1003 09:09:49.668190 4899 scope.go:117] "RemoveContainer" containerID="c37ad0691076d112c3c9c0ca730d7d97fa19cb1e772ef18b8f600b798c847111" Oct 03 09:09:49 crc kubenswrapper[4899]: I1003 09:09:49.766272 4899 scope.go:117] "RemoveContainer" containerID="94d809a76375d5b33250adc68a0dd8b5db3a6cb0efd13c6fa4823522858f0951" Oct 03 09:09:56 crc kubenswrapper[4899]: I1003 09:09:56.533439 4899 scope.go:117] "RemoveContainer" containerID="7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b" Oct 03 09:09:56 crc kubenswrapper[4899]: E1003 09:09:56.534379 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:10:08 crc kubenswrapper[4899]: I1003 09:10:08.527747 4899 scope.go:117] "RemoveContainer" containerID="7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b" Oct 03 09:10:08 crc kubenswrapper[4899]: E1003 09:10:08.528615 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:10:13 crc kubenswrapper[4899]: I1003 09:10:13.269622 4899 generic.go:334] "Generic (PLEG): container finished" podID="51abfa86-5d01-4b3e-aceb-155ded93aa49" containerID="25df7d07fe4046df8537eefa6c40d23dd82f1ba560de1b2eecd73c6cd9d21c52" exitCode=0 Oct 03 09:10:13 crc kubenswrapper[4899]: I1003 09:10:13.269705 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-djftw" event={"ID":"51abfa86-5d01-4b3e-aceb-155ded93aa49","Type":"ContainerDied","Data":"25df7d07fe4046df8537eefa6c40d23dd82f1ba560de1b2eecd73c6cd9d21c52"} Oct 03 09:10:14 crc kubenswrapper[4899]: I1003 09:10:14.664829 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-djftw" Oct 03 09:10:14 crc kubenswrapper[4899]: I1003 09:10:14.771223 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/51abfa86-5d01-4b3e-aceb-155ded93aa49-ssh-key\") pod \"51abfa86-5d01-4b3e-aceb-155ded93aa49\" (UID: \"51abfa86-5d01-4b3e-aceb-155ded93aa49\") " Oct 03 09:10:14 crc kubenswrapper[4899]: I1003 09:10:14.771363 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51abfa86-5d01-4b3e-aceb-155ded93aa49-inventory\") pod \"51abfa86-5d01-4b3e-aceb-155ded93aa49\" (UID: \"51abfa86-5d01-4b3e-aceb-155ded93aa49\") " Oct 03 09:10:14 crc kubenswrapper[4899]: I1003 09:10:14.771435 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rlxbl\" (UniqueName: \"kubernetes.io/projected/51abfa86-5d01-4b3e-aceb-155ded93aa49-kube-api-access-rlxbl\") pod \"51abfa86-5d01-4b3e-aceb-155ded93aa49\" (UID: \"51abfa86-5d01-4b3e-aceb-155ded93aa49\") " Oct 03 09:10:14 crc kubenswrapper[4899]: I1003 09:10:14.777344 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51abfa86-5d01-4b3e-aceb-155ded93aa49-kube-api-access-rlxbl" (OuterVolumeSpecName: "kube-api-access-rlxbl") pod "51abfa86-5d01-4b3e-aceb-155ded93aa49" (UID: "51abfa86-5d01-4b3e-aceb-155ded93aa49"). InnerVolumeSpecName "kube-api-access-rlxbl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:10:14 crc kubenswrapper[4899]: I1003 09:10:14.797633 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51abfa86-5d01-4b3e-aceb-155ded93aa49-inventory" (OuterVolumeSpecName: "inventory") pod "51abfa86-5d01-4b3e-aceb-155ded93aa49" (UID: "51abfa86-5d01-4b3e-aceb-155ded93aa49"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:10:14 crc kubenswrapper[4899]: I1003 09:10:14.798806 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51abfa86-5d01-4b3e-aceb-155ded93aa49-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "51abfa86-5d01-4b3e-aceb-155ded93aa49" (UID: "51abfa86-5d01-4b3e-aceb-155ded93aa49"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:10:14 crc kubenswrapper[4899]: I1003 09:10:14.873725 4899 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/51abfa86-5d01-4b3e-aceb-155ded93aa49-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 09:10:14 crc kubenswrapper[4899]: I1003 09:10:14.873774 4899 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51abfa86-5d01-4b3e-aceb-155ded93aa49-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 09:10:14 crc kubenswrapper[4899]: I1003 09:10:14.873789 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rlxbl\" (UniqueName: \"kubernetes.io/projected/51abfa86-5d01-4b3e-aceb-155ded93aa49-kube-api-access-rlxbl\") on node \"crc\" DevicePath \"\"" Oct 03 09:10:15 crc kubenswrapper[4899]: I1003 09:10:15.289390 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-djftw" event={"ID":"51abfa86-5d01-4b3e-aceb-155ded93aa49","Type":"ContainerDied","Data":"70f655e99751252f84de62198aa3bef4d1fbc4cb85e823fd0e2a2781aa2b6dec"} Oct 03 09:10:15 crc kubenswrapper[4899]: I1003 09:10:15.289443 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="70f655e99751252f84de62198aa3bef4d1fbc4cb85e823fd0e2a2781aa2b6dec" Oct 03 09:10:15 crc kubenswrapper[4899]: I1003 09:10:15.289450 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-djftw" Oct 03 09:10:15 crc kubenswrapper[4899]: I1003 09:10:15.374168 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-4qh2m"] Oct 03 09:10:15 crc kubenswrapper[4899]: E1003 09:10:15.374708 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51abfa86-5d01-4b3e-aceb-155ded93aa49" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 03 09:10:15 crc kubenswrapper[4899]: I1003 09:10:15.374737 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="51abfa86-5d01-4b3e-aceb-155ded93aa49" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 03 09:10:15 crc kubenswrapper[4899]: I1003 09:10:15.374966 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="51abfa86-5d01-4b3e-aceb-155ded93aa49" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 03 09:10:15 crc kubenswrapper[4899]: I1003 09:10:15.375632 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-4qh2m" Oct 03 09:10:15 crc kubenswrapper[4899]: I1003 09:10:15.378932 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 09:10:15 crc kubenswrapper[4899]: I1003 09:10:15.380727 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 09:10:15 crc kubenswrapper[4899]: I1003 09:10:15.381522 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 09:10:15 crc kubenswrapper[4899]: I1003 09:10:15.382677 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pnmjv" Oct 03 09:10:15 crc kubenswrapper[4899]: I1003 09:10:15.383543 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-4qh2m"] Oct 03 09:10:15 crc kubenswrapper[4899]: I1003 09:10:15.486536 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/f95e0dc0-dc93-4ec2-ae14-96f3641e651a-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-4qh2m\" (UID: \"f95e0dc0-dc93-4ec2-ae14-96f3641e651a\") " pod="openstack/ssh-known-hosts-edpm-deployment-4qh2m" Oct 03 09:10:15 crc kubenswrapper[4899]: I1003 09:10:15.486729 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f95e0dc0-dc93-4ec2-ae14-96f3641e651a-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-4qh2m\" (UID: \"f95e0dc0-dc93-4ec2-ae14-96f3641e651a\") " pod="openstack/ssh-known-hosts-edpm-deployment-4qh2m" Oct 03 09:10:15 crc kubenswrapper[4899]: I1003 09:10:15.486754 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmxn6\" (UniqueName: \"kubernetes.io/projected/f95e0dc0-dc93-4ec2-ae14-96f3641e651a-kube-api-access-nmxn6\") pod \"ssh-known-hosts-edpm-deployment-4qh2m\" (UID: \"f95e0dc0-dc93-4ec2-ae14-96f3641e651a\") " pod="openstack/ssh-known-hosts-edpm-deployment-4qh2m" Oct 03 09:10:15 crc kubenswrapper[4899]: I1003 09:10:15.587947 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmxn6\" (UniqueName: \"kubernetes.io/projected/f95e0dc0-dc93-4ec2-ae14-96f3641e651a-kube-api-access-nmxn6\") pod \"ssh-known-hosts-edpm-deployment-4qh2m\" (UID: \"f95e0dc0-dc93-4ec2-ae14-96f3641e651a\") " pod="openstack/ssh-known-hosts-edpm-deployment-4qh2m" Oct 03 09:10:15 crc kubenswrapper[4899]: I1003 09:10:15.587997 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f95e0dc0-dc93-4ec2-ae14-96f3641e651a-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-4qh2m\" (UID: \"f95e0dc0-dc93-4ec2-ae14-96f3641e651a\") " pod="openstack/ssh-known-hosts-edpm-deployment-4qh2m" Oct 03 09:10:15 crc kubenswrapper[4899]: I1003 09:10:15.588108 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/f95e0dc0-dc93-4ec2-ae14-96f3641e651a-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-4qh2m\" (UID: \"f95e0dc0-dc93-4ec2-ae14-96f3641e651a\") " pod="openstack/ssh-known-hosts-edpm-deployment-4qh2m" Oct 03 09:10:15 crc 
kubenswrapper[4899]: I1003 09:10:15.591700 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f95e0dc0-dc93-4ec2-ae14-96f3641e651a-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-4qh2m\" (UID: \"f95e0dc0-dc93-4ec2-ae14-96f3641e651a\") " pod="openstack/ssh-known-hosts-edpm-deployment-4qh2m" Oct 03 09:10:15 crc kubenswrapper[4899]: I1003 09:10:15.591791 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/f95e0dc0-dc93-4ec2-ae14-96f3641e651a-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-4qh2m\" (UID: \"f95e0dc0-dc93-4ec2-ae14-96f3641e651a\") " pod="openstack/ssh-known-hosts-edpm-deployment-4qh2m" Oct 03 09:10:15 crc kubenswrapper[4899]: I1003 09:10:15.606863 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmxn6\" (UniqueName: \"kubernetes.io/projected/f95e0dc0-dc93-4ec2-ae14-96f3641e651a-kube-api-access-nmxn6\") pod \"ssh-known-hosts-edpm-deployment-4qh2m\" (UID: \"f95e0dc0-dc93-4ec2-ae14-96f3641e651a\") " pod="openstack/ssh-known-hosts-edpm-deployment-4qh2m" Oct 03 09:10:15 crc kubenswrapper[4899]: I1003 09:10:15.691721 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-4qh2m" Oct 03 09:10:16 crc kubenswrapper[4899]: I1003 09:10:16.173030 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-4qh2m"] Oct 03 09:10:16 crc kubenswrapper[4899]: I1003 09:10:16.298059 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-4qh2m" event={"ID":"f95e0dc0-dc93-4ec2-ae14-96f3641e651a","Type":"ContainerStarted","Data":"da8abb522db31d1d784ea26be2ae55f7488628303d2a34f54aa265788b91886e"} Oct 03 09:10:17 crc kubenswrapper[4899]: I1003 09:10:17.307699 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-4qh2m" event={"ID":"f95e0dc0-dc93-4ec2-ae14-96f3641e651a","Type":"ContainerStarted","Data":"0724e3404086f775016d02388ab3540dad3803972117dbef64cf57c4e77679d8"} Oct 03 09:10:17 crc kubenswrapper[4899]: I1003 09:10:17.333835 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-4qh2m" podStartSLOduration=1.613988477 podStartE2EDuration="2.333808329s" podCreationTimestamp="2025-10-03 09:10:15 +0000 UTC" firstStartedPulling="2025-10-03 09:10:16.177794266 +0000 UTC m=+1790.285279219" lastFinishedPulling="2025-10-03 09:10:16.897614118 +0000 UTC m=+1791.005099071" observedRunningTime="2025-10-03 09:10:17.322134951 +0000 UTC m=+1791.429619904" watchObservedRunningTime="2025-10-03 09:10:17.333808329 +0000 UTC m=+1791.441293282" Oct 03 09:10:22 crc kubenswrapper[4899]: I1003 09:10:22.527227 4899 scope.go:117] "RemoveContainer" containerID="7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b" Oct 03 09:10:22 crc kubenswrapper[4899]: E1003 09:10:22.528686 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:10:24 crc 
kubenswrapper[4899]: I1003 09:10:24.382699 4899 generic.go:334] "Generic (PLEG): container finished" podID="f95e0dc0-dc93-4ec2-ae14-96f3641e651a" containerID="0724e3404086f775016d02388ab3540dad3803972117dbef64cf57c4e77679d8" exitCode=0 Oct 03 09:10:24 crc kubenswrapper[4899]: I1003 09:10:24.382782 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-4qh2m" event={"ID":"f95e0dc0-dc93-4ec2-ae14-96f3641e651a","Type":"ContainerDied","Data":"0724e3404086f775016d02388ab3540dad3803972117dbef64cf57c4e77679d8"} Oct 03 09:10:25 crc kubenswrapper[4899]: I1003 09:10:25.767018 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-4qh2m" Oct 03 09:10:25 crc kubenswrapper[4899]: I1003 09:10:25.890752 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmxn6\" (UniqueName: \"kubernetes.io/projected/f95e0dc0-dc93-4ec2-ae14-96f3641e651a-kube-api-access-nmxn6\") pod \"f95e0dc0-dc93-4ec2-ae14-96f3641e651a\" (UID: \"f95e0dc0-dc93-4ec2-ae14-96f3641e651a\") " Oct 03 09:10:25 crc kubenswrapper[4899]: I1003 09:10:25.890879 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f95e0dc0-dc93-4ec2-ae14-96f3641e651a-ssh-key-openstack-edpm-ipam\") pod \"f95e0dc0-dc93-4ec2-ae14-96f3641e651a\" (UID: \"f95e0dc0-dc93-4ec2-ae14-96f3641e651a\") " Oct 03 09:10:25 crc kubenswrapper[4899]: I1003 09:10:25.891049 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/f95e0dc0-dc93-4ec2-ae14-96f3641e651a-inventory-0\") pod \"f95e0dc0-dc93-4ec2-ae14-96f3641e651a\" (UID: \"f95e0dc0-dc93-4ec2-ae14-96f3641e651a\") " Oct 03 09:10:25 crc kubenswrapper[4899]: I1003 09:10:25.896265 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f95e0dc0-dc93-4ec2-ae14-96f3641e651a-kube-api-access-nmxn6" (OuterVolumeSpecName: "kube-api-access-nmxn6") pod "f95e0dc0-dc93-4ec2-ae14-96f3641e651a" (UID: "f95e0dc0-dc93-4ec2-ae14-96f3641e651a"). InnerVolumeSpecName "kube-api-access-nmxn6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:10:25 crc kubenswrapper[4899]: I1003 09:10:25.917750 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f95e0dc0-dc93-4ec2-ae14-96f3641e651a-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "f95e0dc0-dc93-4ec2-ae14-96f3641e651a" (UID: "f95e0dc0-dc93-4ec2-ae14-96f3641e651a"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:10:25 crc kubenswrapper[4899]: I1003 09:10:25.918589 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f95e0dc0-dc93-4ec2-ae14-96f3641e651a-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "f95e0dc0-dc93-4ec2-ae14-96f3641e651a" (UID: "f95e0dc0-dc93-4ec2-ae14-96f3641e651a"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:10:25 crc kubenswrapper[4899]: I1003 09:10:25.994144 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmxn6\" (UniqueName: \"kubernetes.io/projected/f95e0dc0-dc93-4ec2-ae14-96f3641e651a-kube-api-access-nmxn6\") on node \"crc\" DevicePath \"\"" Oct 03 09:10:25 crc kubenswrapper[4899]: I1003 09:10:25.994189 4899 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f95e0dc0-dc93-4ec2-ae14-96f3641e651a-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 03 09:10:25 crc kubenswrapper[4899]: I1003 09:10:25.994217 4899 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/f95e0dc0-dc93-4ec2-ae14-96f3641e651a-inventory-0\") on node \"crc\" DevicePath \"\"" Oct 03 09:10:26 crc kubenswrapper[4899]: I1003 09:10:26.402241 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-4qh2m" event={"ID":"f95e0dc0-dc93-4ec2-ae14-96f3641e651a","Type":"ContainerDied","Data":"da8abb522db31d1d784ea26be2ae55f7488628303d2a34f54aa265788b91886e"} Oct 03 09:10:26 crc kubenswrapper[4899]: I1003 09:10:26.402281 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="da8abb522db31d1d784ea26be2ae55f7488628303d2a34f54aa265788b91886e" Oct 03 09:10:26 crc kubenswrapper[4899]: I1003 09:10:26.402306 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-4qh2m" Oct 03 09:10:26 crc kubenswrapper[4899]: I1003 09:10:26.464865 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-t58bp"] Oct 03 09:10:26 crc kubenswrapper[4899]: E1003 09:10:26.465375 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f95e0dc0-dc93-4ec2-ae14-96f3641e651a" containerName="ssh-known-hosts-edpm-deployment" Oct 03 09:10:26 crc kubenswrapper[4899]: I1003 09:10:26.465397 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="f95e0dc0-dc93-4ec2-ae14-96f3641e651a" containerName="ssh-known-hosts-edpm-deployment" Oct 03 09:10:26 crc kubenswrapper[4899]: I1003 09:10:26.465606 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="f95e0dc0-dc93-4ec2-ae14-96f3641e651a" containerName="ssh-known-hosts-edpm-deployment" Oct 03 09:10:26 crc kubenswrapper[4899]: I1003 09:10:26.466496 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t58bp" Oct 03 09:10:26 crc kubenswrapper[4899]: I1003 09:10:26.469122 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 09:10:26 crc kubenswrapper[4899]: I1003 09:10:26.469140 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 09:10:26 crc kubenswrapper[4899]: I1003 09:10:26.469675 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 09:10:26 crc kubenswrapper[4899]: I1003 09:10:26.470036 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pnmjv" Oct 03 09:10:26 crc kubenswrapper[4899]: I1003 09:10:26.474966 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-t58bp"] Oct 03 09:10:26 crc kubenswrapper[4899]: I1003 09:10:26.605284 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gp62s\" (UniqueName: \"kubernetes.io/projected/ec00fd0d-26af-42f1-afdd-a21d668719d5-kube-api-access-gp62s\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-t58bp\" (UID: \"ec00fd0d-26af-42f1-afdd-a21d668719d5\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t58bp" Oct 03 09:10:26 crc kubenswrapper[4899]: I1003 09:10:26.606480 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ec00fd0d-26af-42f1-afdd-a21d668719d5-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-t58bp\" (UID: \"ec00fd0d-26af-42f1-afdd-a21d668719d5\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t58bp" Oct 03 09:10:26 crc kubenswrapper[4899]: I1003 09:10:26.607698 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ec00fd0d-26af-42f1-afdd-a21d668719d5-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-t58bp\" (UID: \"ec00fd0d-26af-42f1-afdd-a21d668719d5\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t58bp" Oct 03 09:10:26 crc kubenswrapper[4899]: I1003 09:10:26.708569 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gp62s\" (UniqueName: \"kubernetes.io/projected/ec00fd0d-26af-42f1-afdd-a21d668719d5-kube-api-access-gp62s\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-t58bp\" (UID: \"ec00fd0d-26af-42f1-afdd-a21d668719d5\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t58bp" Oct 03 09:10:26 crc kubenswrapper[4899]: I1003 09:10:26.708690 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ec00fd0d-26af-42f1-afdd-a21d668719d5-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-t58bp\" (UID: \"ec00fd0d-26af-42f1-afdd-a21d668719d5\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t58bp" Oct 03 09:10:26 crc kubenswrapper[4899]: I1003 09:10:26.708804 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ec00fd0d-26af-42f1-afdd-a21d668719d5-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-t58bp\" (UID: \"ec00fd0d-26af-42f1-afdd-a21d668719d5\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t58bp" Oct 03 09:10:26 crc kubenswrapper[4899]: I1003 09:10:26.710927 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 09:10:26 crc kubenswrapper[4899]: I1003 09:10:26.711103 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 09:10:26 crc kubenswrapper[4899]: I1003 09:10:26.723398 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ec00fd0d-26af-42f1-afdd-a21d668719d5-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-t58bp\" (UID: \"ec00fd0d-26af-42f1-afdd-a21d668719d5\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t58bp" Oct 03 09:10:26 crc kubenswrapper[4899]: I1003 09:10:26.723608 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ec00fd0d-26af-42f1-afdd-a21d668719d5-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-t58bp\" (UID: \"ec00fd0d-26af-42f1-afdd-a21d668719d5\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t58bp" Oct 03 09:10:26 crc kubenswrapper[4899]: I1003 09:10:26.729109 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gp62s\" (UniqueName: \"kubernetes.io/projected/ec00fd0d-26af-42f1-afdd-a21d668719d5-kube-api-access-gp62s\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-t58bp\" (UID: \"ec00fd0d-26af-42f1-afdd-a21d668719d5\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t58bp" Oct 03 09:10:26 crc kubenswrapper[4899]: I1003 09:10:26.787048 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pnmjv" Oct 03 09:10:26 crc kubenswrapper[4899]: I1003 09:10:26.794680 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t58bp" Oct 03 09:10:27 crc kubenswrapper[4899]: I1003 09:10:27.310256 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-t58bp"] Oct 03 09:10:27 crc kubenswrapper[4899]: I1003 09:10:27.410090 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t58bp" event={"ID":"ec00fd0d-26af-42f1-afdd-a21d668719d5","Type":"ContainerStarted","Data":"3f1ded67a4656343280680237bbc4ae40132b909da87f74ba19e0e56c0d514e6"} Oct 03 09:10:27 crc kubenswrapper[4899]: I1003 09:10:27.857713 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 09:10:28 crc kubenswrapper[4899]: I1003 09:10:28.419223 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t58bp" event={"ID":"ec00fd0d-26af-42f1-afdd-a21d668719d5","Type":"ContainerStarted","Data":"ae4e70afb0aef2681f14be01253ab13ebd6db7a8294751e1f679fc55f9f8b421"} Oct 03 09:10:28 crc kubenswrapper[4899]: I1003 09:10:28.433524 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t58bp" podStartSLOduration=1.90160922 podStartE2EDuration="2.433499512s" podCreationTimestamp="2025-10-03 09:10:26 +0000 UTC" firstStartedPulling="2025-10-03 09:10:27.321941641 +0000 UTC m=+1801.429426594" lastFinishedPulling="2025-10-03 09:10:27.853831933 +0000 UTC m=+1801.961316886" observedRunningTime="2025-10-03 09:10:28.432698986 +0000 UTC m=+1802.540183939" watchObservedRunningTime="2025-10-03 09:10:28.433499512 +0000 UTC m=+1802.540984465" Oct 03 09:10:36 crc kubenswrapper[4899]: I1003 09:10:36.490884 4899 generic.go:334] "Generic (PLEG): container finished" podID="ec00fd0d-26af-42f1-afdd-a21d668719d5" containerID="ae4e70afb0aef2681f14be01253ab13ebd6db7a8294751e1f679fc55f9f8b421" exitCode=0 Oct 03 09:10:36 crc kubenswrapper[4899]: I1003 09:10:36.490996 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t58bp" event={"ID":"ec00fd0d-26af-42f1-afdd-a21d668719d5","Type":"ContainerDied","Data":"ae4e70afb0aef2681f14be01253ab13ebd6db7a8294751e1f679fc55f9f8b421"} Oct 03 09:10:36 crc kubenswrapper[4899]: I1003 09:10:36.534780 4899 scope.go:117] "RemoveContainer" containerID="7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b" Oct 03 09:10:36 crc kubenswrapper[4899]: E1003 09:10:36.535126 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:10:37 crc kubenswrapper[4899]: I1003 09:10:37.900141 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t58bp" Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.019688 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ec00fd0d-26af-42f1-afdd-a21d668719d5-inventory\") pod \"ec00fd0d-26af-42f1-afdd-a21d668719d5\" (UID: \"ec00fd0d-26af-42f1-afdd-a21d668719d5\") " Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.019918 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gp62s\" (UniqueName: \"kubernetes.io/projected/ec00fd0d-26af-42f1-afdd-a21d668719d5-kube-api-access-gp62s\") pod \"ec00fd0d-26af-42f1-afdd-a21d668719d5\" (UID: \"ec00fd0d-26af-42f1-afdd-a21d668719d5\") " Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.019962 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ec00fd0d-26af-42f1-afdd-a21d668719d5-ssh-key\") pod \"ec00fd0d-26af-42f1-afdd-a21d668719d5\" (UID: \"ec00fd0d-26af-42f1-afdd-a21d668719d5\") " Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.025433 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec00fd0d-26af-42f1-afdd-a21d668719d5-kube-api-access-gp62s" (OuterVolumeSpecName: "kube-api-access-gp62s") pod "ec00fd0d-26af-42f1-afdd-a21d668719d5" (UID: "ec00fd0d-26af-42f1-afdd-a21d668719d5"). InnerVolumeSpecName "kube-api-access-gp62s". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.051355 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec00fd0d-26af-42f1-afdd-a21d668719d5-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ec00fd0d-26af-42f1-afdd-a21d668719d5" (UID: "ec00fd0d-26af-42f1-afdd-a21d668719d5"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.052439 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec00fd0d-26af-42f1-afdd-a21d668719d5-inventory" (OuterVolumeSpecName: "inventory") pod "ec00fd0d-26af-42f1-afdd-a21d668719d5" (UID: "ec00fd0d-26af-42f1-afdd-a21d668719d5"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.122238 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gp62s\" (UniqueName: \"kubernetes.io/projected/ec00fd0d-26af-42f1-afdd-a21d668719d5-kube-api-access-gp62s\") on node \"crc\" DevicePath \"\"" Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.122276 4899 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ec00fd0d-26af-42f1-afdd-a21d668719d5-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.122290 4899 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ec00fd0d-26af-42f1-afdd-a21d668719d5-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.520702 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t58bp" event={"ID":"ec00fd0d-26af-42f1-afdd-a21d668719d5","Type":"ContainerDied","Data":"3f1ded67a4656343280680237bbc4ae40132b909da87f74ba19e0e56c0d514e6"} Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.520741 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3f1ded67a4656343280680237bbc4ae40132b909da87f74ba19e0e56c0d514e6" Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.520806 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-t58bp" Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.577113 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-df25h"] Oct 03 09:10:38 crc kubenswrapper[4899]: E1003 09:10:38.577624 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec00fd0d-26af-42f1-afdd-a21d668719d5" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.577643 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec00fd0d-26af-42f1-afdd-a21d668719d5" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.577850 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec00fd0d-26af-42f1-afdd-a21d668719d5" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.578685 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-df25h" Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.581714 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pnmjv" Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.583339 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.583859 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.584042 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.594943 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-df25h"] Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.731475 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23483d3a-dd9e-4fcd-81a6-465936a69838-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-df25h\" (UID: \"23483d3a-dd9e-4fcd-81a6-465936a69838\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-df25h" Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.731613 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xq4s5\" (UniqueName: \"kubernetes.io/projected/23483d3a-dd9e-4fcd-81a6-465936a69838-kube-api-access-xq4s5\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-df25h\" (UID: \"23483d3a-dd9e-4fcd-81a6-465936a69838\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-df25h" Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.731838 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23483d3a-dd9e-4fcd-81a6-465936a69838-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-df25h\" (UID: \"23483d3a-dd9e-4fcd-81a6-465936a69838\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-df25h" Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.833862 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23483d3a-dd9e-4fcd-81a6-465936a69838-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-df25h\" (UID: \"23483d3a-dd9e-4fcd-81a6-465936a69838\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-df25h" Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.834094 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xq4s5\" (UniqueName: \"kubernetes.io/projected/23483d3a-dd9e-4fcd-81a6-465936a69838-kube-api-access-xq4s5\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-df25h\" (UID: \"23483d3a-dd9e-4fcd-81a6-465936a69838\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-df25h" Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.834159 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23483d3a-dd9e-4fcd-81a6-465936a69838-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-df25h\" (UID: 
\"23483d3a-dd9e-4fcd-81a6-465936a69838\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-df25h" Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.838967 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23483d3a-dd9e-4fcd-81a6-465936a69838-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-df25h\" (UID: \"23483d3a-dd9e-4fcd-81a6-465936a69838\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-df25h" Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.839192 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23483d3a-dd9e-4fcd-81a6-465936a69838-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-df25h\" (UID: \"23483d3a-dd9e-4fcd-81a6-465936a69838\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-df25h" Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.853264 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xq4s5\" (UniqueName: \"kubernetes.io/projected/23483d3a-dd9e-4fcd-81a6-465936a69838-kube-api-access-xq4s5\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-df25h\" (UID: \"23483d3a-dd9e-4fcd-81a6-465936a69838\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-df25h" Oct 03 09:10:38 crc kubenswrapper[4899]: I1003 09:10:38.900558 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-df25h" Oct 03 09:10:39 crc kubenswrapper[4899]: I1003 09:10:39.386345 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-df25h"] Oct 03 09:10:39 crc kubenswrapper[4899]: I1003 09:10:39.529039 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-df25h" event={"ID":"23483d3a-dd9e-4fcd-81a6-465936a69838","Type":"ContainerStarted","Data":"e0842cfe8dcd91de993f66a13957a66ec8f7a5a0a8f308e3ff65ce44e38530b6"} Oct 03 09:10:40 crc kubenswrapper[4899]: I1003 09:10:40.590149 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-df25h" event={"ID":"23483d3a-dd9e-4fcd-81a6-465936a69838","Type":"ContainerStarted","Data":"5008a6dcde34d0ccbb4879d400966510caad6415a9b62efe421ccbf61f63f2ad"} Oct 03 09:10:40 crc kubenswrapper[4899]: I1003 09:10:40.619185 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-df25h" podStartSLOduration=1.778755646 podStartE2EDuration="2.619163347s" podCreationTimestamp="2025-10-03 09:10:38 +0000 UTC" firstStartedPulling="2025-10-03 09:10:39.396411047 +0000 UTC m=+1813.503896000" lastFinishedPulling="2025-10-03 09:10:40.236818748 +0000 UTC m=+1814.344303701" observedRunningTime="2025-10-03 09:10:40.617144464 +0000 UTC m=+1814.724629417" watchObservedRunningTime="2025-10-03 09:10:40.619163347 +0000 UTC m=+1814.726648300" Oct 03 09:10:48 crc kubenswrapper[4899]: I1003 09:10:48.527433 4899 scope.go:117] "RemoveContainer" containerID="7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b" Oct 03 09:10:49 crc kubenswrapper[4899]: I1003 09:10:49.658433 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" 
event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerStarted","Data":"b2b34b8d69d69b7d2982e1866f37318d87ef851d9135ccb09c02fa5d8cd572f5"} Oct 03 09:10:49 crc kubenswrapper[4899]: I1003 09:10:49.663840 4899 generic.go:334] "Generic (PLEG): container finished" podID="23483d3a-dd9e-4fcd-81a6-465936a69838" containerID="5008a6dcde34d0ccbb4879d400966510caad6415a9b62efe421ccbf61f63f2ad" exitCode=0 Oct 03 09:10:49 crc kubenswrapper[4899]: I1003 09:10:49.663902 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-df25h" event={"ID":"23483d3a-dd9e-4fcd-81a6-465936a69838","Type":"ContainerDied","Data":"5008a6dcde34d0ccbb4879d400966510caad6415a9b62efe421ccbf61f63f2ad"} Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.058242 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-df25h" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.161148 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23483d3a-dd9e-4fcd-81a6-465936a69838-inventory\") pod \"23483d3a-dd9e-4fcd-81a6-465936a69838\" (UID: \"23483d3a-dd9e-4fcd-81a6-465936a69838\") " Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.161317 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23483d3a-dd9e-4fcd-81a6-465936a69838-ssh-key\") pod \"23483d3a-dd9e-4fcd-81a6-465936a69838\" (UID: \"23483d3a-dd9e-4fcd-81a6-465936a69838\") " Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.161386 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xq4s5\" (UniqueName: \"kubernetes.io/projected/23483d3a-dd9e-4fcd-81a6-465936a69838-kube-api-access-xq4s5\") pod \"23483d3a-dd9e-4fcd-81a6-465936a69838\" (UID: \"23483d3a-dd9e-4fcd-81a6-465936a69838\") " Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.166300 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23483d3a-dd9e-4fcd-81a6-465936a69838-kube-api-access-xq4s5" (OuterVolumeSpecName: "kube-api-access-xq4s5") pod "23483d3a-dd9e-4fcd-81a6-465936a69838" (UID: "23483d3a-dd9e-4fcd-81a6-465936a69838"). InnerVolumeSpecName "kube-api-access-xq4s5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.190188 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23483d3a-dd9e-4fcd-81a6-465936a69838-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "23483d3a-dd9e-4fcd-81a6-465936a69838" (UID: "23483d3a-dd9e-4fcd-81a6-465936a69838"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.192988 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23483d3a-dd9e-4fcd-81a6-465936a69838-inventory" (OuterVolumeSpecName: "inventory") pod "23483d3a-dd9e-4fcd-81a6-465936a69838" (UID: "23483d3a-dd9e-4fcd-81a6-465936a69838"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.263709 4899 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23483d3a-dd9e-4fcd-81a6-465936a69838-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.263749 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xq4s5\" (UniqueName: \"kubernetes.io/projected/23483d3a-dd9e-4fcd-81a6-465936a69838-kube-api-access-xq4s5\") on node \"crc\" DevicePath \"\"" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.263766 4899 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23483d3a-dd9e-4fcd-81a6-465936a69838-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.678852 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-df25h" event={"ID":"23483d3a-dd9e-4fcd-81a6-465936a69838","Type":"ContainerDied","Data":"e0842cfe8dcd91de993f66a13957a66ec8f7a5a0a8f308e3ff65ce44e38530b6"} Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.679141 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e0842cfe8dcd91de993f66a13957a66ec8f7a5a0a8f308e3ff65ce44e38530b6" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.678913 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-df25h" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.855432 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w"] Oct 03 09:10:51 crc kubenswrapper[4899]: E1003 09:10:51.855922 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23483d3a-dd9e-4fcd-81a6-465936a69838" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.855954 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="23483d3a-dd9e-4fcd-81a6-465936a69838" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.856214 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="23483d3a-dd9e-4fcd-81a6-465936a69838" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.857023 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.860685 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.890254 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pnmjv" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.890543 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.890706 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.890869 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.891046 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.891741 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.895739 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.918040 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w"] Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.921204 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.921254 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.921287 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.921413 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.921540 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.921655 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.921857 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.921940 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.921995 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.922106 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6j48\" (UniqueName: \"kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-kube-api-access-s6j48\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.922477 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.922538 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.922570 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:51 crc kubenswrapper[4899]: I1003 09:10:51.922650 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.024503 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.024563 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.024620 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.024658 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: 
\"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.024724 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6j48\" (UniqueName: \"kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-kube-api-access-s6j48\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.024762 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.024790 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.024815 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.024864 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.024943 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.024973 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.025020 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.025048 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.025085 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.030592 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.031448 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.031576 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.032002 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.032198 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.032368 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.032483 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.032496 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.032655 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.032937 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.033465 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.033711 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.034916 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.043024 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6j48\" (UniqueName: \"kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-kube-api-access-s6j48\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hr45w\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.214797 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:10:52 crc kubenswrapper[4899]: I1003 09:10:52.726616 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w"] Oct 03 09:10:52 crc kubenswrapper[4899]: W1003 09:10:52.730808 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3aad51f1_e1c6_4677_a00a_e81438b9650a.slice/crio-866b8ca2f2edd9e87a2731bcef23ea7a4472c247a5d94007ebc9e90ad9231388 WatchSource:0}: Error finding container 866b8ca2f2edd9e87a2731bcef23ea7a4472c247a5d94007ebc9e90ad9231388: Status 404 returned error can't find the container with id 866b8ca2f2edd9e87a2731bcef23ea7a4472c247a5d94007ebc9e90ad9231388 Oct 03 09:10:53 crc kubenswrapper[4899]: I1003 09:10:53.711158 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" event={"ID":"3aad51f1-e1c6-4677-a00a-e81438b9650a","Type":"ContainerStarted","Data":"5d34c948b38f3fec4f935d85fc8499c577db2d312ebb9efc319926833142fef9"} Oct 03 09:10:53 crc kubenswrapper[4899]: I1003 09:10:53.711569 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" event={"ID":"3aad51f1-e1c6-4677-a00a-e81438b9650a","Type":"ContainerStarted","Data":"866b8ca2f2edd9e87a2731bcef23ea7a4472c247a5d94007ebc9e90ad9231388"} Oct 03 09:10:53 crc kubenswrapper[4899]: I1003 09:10:53.739006 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" podStartSLOduration=2.348196748 podStartE2EDuration="2.738987344s" podCreationTimestamp="2025-10-03 09:10:51 +0000 UTC" firstStartedPulling="2025-10-03 09:10:52.733498852 +0000 UTC m=+1826.840983805" lastFinishedPulling="2025-10-03 09:10:53.124289448 +0000 UTC m=+1827.231774401" observedRunningTime="2025-10-03 09:10:53.730132584 +0000 UTC m=+1827.837617537" watchObservedRunningTime="2025-10-03 09:10:53.738987344 +0000 UTC m=+1827.846472297" Oct 03 09:11:33 crc kubenswrapper[4899]: I1003 09:11:33.051439 4899 generic.go:334] "Generic (PLEG): container finished" podID="3aad51f1-e1c6-4677-a00a-e81438b9650a" containerID="5d34c948b38f3fec4f935d85fc8499c577db2d312ebb9efc319926833142fef9" exitCode=0 Oct 03 09:11:33 crc kubenswrapper[4899]: I1003 09:11:33.051439 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" 
event={"ID":"3aad51f1-e1c6-4677-a00a-e81438b9650a","Type":"ContainerDied","Data":"5d34c948b38f3fec4f935d85fc8499c577db2d312ebb9efc319926833142fef9"} Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.440134 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.623246 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-bootstrap-combined-ca-bundle\") pod \"3aad51f1-e1c6-4677-a00a-e81438b9650a\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.623610 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-nova-combined-ca-bundle\") pod \"3aad51f1-e1c6-4677-a00a-e81438b9650a\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.623695 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-ovn-combined-ca-bundle\") pod \"3aad51f1-e1c6-4677-a00a-e81438b9650a\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.623728 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"3aad51f1-e1c6-4677-a00a-e81438b9650a\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.623792 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-ssh-key\") pod \"3aad51f1-e1c6-4677-a00a-e81438b9650a\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.623828 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-repo-setup-combined-ca-bundle\") pod \"3aad51f1-e1c6-4677-a00a-e81438b9650a\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.623853 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-telemetry-combined-ca-bundle\") pod \"3aad51f1-e1c6-4677-a00a-e81438b9650a\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.623885 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"3aad51f1-e1c6-4677-a00a-e81438b9650a\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.623923 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"3aad51f1-e1c6-4677-a00a-e81438b9650a\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.623959 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s6j48\" (UniqueName: \"kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-kube-api-access-s6j48\") pod \"3aad51f1-e1c6-4677-a00a-e81438b9650a\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.624030 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-openstack-edpm-ipam-ovn-default-certs-0\") pod \"3aad51f1-e1c6-4677-a00a-e81438b9650a\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.624060 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-inventory\") pod \"3aad51f1-e1c6-4677-a00a-e81438b9650a\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.624089 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-libvirt-combined-ca-bundle\") pod \"3aad51f1-e1c6-4677-a00a-e81438b9650a\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.624123 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-neutron-metadata-combined-ca-bundle\") pod \"3aad51f1-e1c6-4677-a00a-e81438b9650a\" (UID: \"3aad51f1-e1c6-4677-a00a-e81438b9650a\") " Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.629422 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "3aad51f1-e1c6-4677-a00a-e81438b9650a" (UID: "3aad51f1-e1c6-4677-a00a-e81438b9650a"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.629977 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "3aad51f1-e1c6-4677-a00a-e81438b9650a" (UID: "3aad51f1-e1c6-4677-a00a-e81438b9650a"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.630000 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "3aad51f1-e1c6-4677-a00a-e81438b9650a" (UID: "3aad51f1-e1c6-4677-a00a-e81438b9650a"). 
InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.630715 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "3aad51f1-e1c6-4677-a00a-e81438b9650a" (UID: "3aad51f1-e1c6-4677-a00a-e81438b9650a"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.631722 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "3aad51f1-e1c6-4677-a00a-e81438b9650a" (UID: "3aad51f1-e1c6-4677-a00a-e81438b9650a"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.631764 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "3aad51f1-e1c6-4677-a00a-e81438b9650a" (UID: "3aad51f1-e1c6-4677-a00a-e81438b9650a"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.632082 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "3aad51f1-e1c6-4677-a00a-e81438b9650a" (UID: "3aad51f1-e1c6-4677-a00a-e81438b9650a"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.632454 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "3aad51f1-e1c6-4677-a00a-e81438b9650a" (UID: "3aad51f1-e1c6-4677-a00a-e81438b9650a"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.632881 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "3aad51f1-e1c6-4677-a00a-e81438b9650a" (UID: "3aad51f1-e1c6-4677-a00a-e81438b9650a"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.632990 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "3aad51f1-e1c6-4677-a00a-e81438b9650a" (UID: "3aad51f1-e1c6-4677-a00a-e81438b9650a"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.633404 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-kube-api-access-s6j48" (OuterVolumeSpecName: "kube-api-access-s6j48") pod "3aad51f1-e1c6-4677-a00a-e81438b9650a" (UID: "3aad51f1-e1c6-4677-a00a-e81438b9650a"). InnerVolumeSpecName "kube-api-access-s6j48". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.635377 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "3aad51f1-e1c6-4677-a00a-e81438b9650a" (UID: "3aad51f1-e1c6-4677-a00a-e81438b9650a"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.653022 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-inventory" (OuterVolumeSpecName: "inventory") pod "3aad51f1-e1c6-4677-a00a-e81438b9650a" (UID: "3aad51f1-e1c6-4677-a00a-e81438b9650a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.658161 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3aad51f1-e1c6-4677-a00a-e81438b9650a" (UID: "3aad51f1-e1c6-4677-a00a-e81438b9650a"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.726244 4899 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.726281 4899 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.726293 4899 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.726303 4899 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.726312 4899 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.726325 4899 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.726333 4899 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.726342 4899 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.726351 4899 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.726359 4899 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.726483 4899 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aad51f1-e1c6-4677-a00a-e81438b9650a-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.726497 4899 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-openstack-edpm-ipam-libvirt-default-certs-0\") on node 
\"crc\" DevicePath \"\"" Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.726507 4899 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 03 09:11:34 crc kubenswrapper[4899]: I1003 09:11:34.726516 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s6j48\" (UniqueName: \"kubernetes.io/projected/3aad51f1-e1c6-4677-a00a-e81438b9650a-kube-api-access-s6j48\") on node \"crc\" DevicePath \"\"" Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.069773 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" event={"ID":"3aad51f1-e1c6-4677-a00a-e81438b9650a","Type":"ContainerDied","Data":"866b8ca2f2edd9e87a2731bcef23ea7a4472c247a5d94007ebc9e90ad9231388"} Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.069814 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hr45w" Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.069819 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="866b8ca2f2edd9e87a2731bcef23ea7a4472c247a5d94007ebc9e90ad9231388" Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.157955 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-2swmz"] Oct 03 09:11:35 crc kubenswrapper[4899]: E1003 09:11:35.158459 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3aad51f1-e1c6-4677-a00a-e81438b9650a" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.158481 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="3aad51f1-e1c6-4677-a00a-e81438b9650a" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.158732 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="3aad51f1-e1c6-4677-a00a-e81438b9650a" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.159550 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2swmz" Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.163837 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.168369 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.168369 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.170168 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pnmjv" Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.172205 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-2swmz"] Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.175718 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.335517 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/2d954cfa-e3a2-4fc0-a1af-61234475db07-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2swmz\" (UID: \"2d954cfa-e3a2-4fc0-a1af-61234475db07\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2swmz" Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.335594 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d954cfa-e3a2-4fc0-a1af-61234475db07-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2swmz\" (UID: \"2d954cfa-e3a2-4fc0-a1af-61234475db07\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2swmz" Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.335627 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2d954cfa-e3a2-4fc0-a1af-61234475db07-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2swmz\" (UID: \"2d954cfa-e3a2-4fc0-a1af-61234475db07\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2swmz" Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.335698 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2d954cfa-e3a2-4fc0-a1af-61234475db07-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2swmz\" (UID: \"2d954cfa-e3a2-4fc0-a1af-61234475db07\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2swmz" Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.335821 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92ltz\" (UniqueName: \"kubernetes.io/projected/2d954cfa-e3a2-4fc0-a1af-61234475db07-kube-api-access-92ltz\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2swmz\" (UID: \"2d954cfa-e3a2-4fc0-a1af-61234475db07\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2swmz" Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.437332 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" 
(UniqueName: \"kubernetes.io/configmap/2d954cfa-e3a2-4fc0-a1af-61234475db07-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2swmz\" (UID: \"2d954cfa-e3a2-4fc0-a1af-61234475db07\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2swmz" Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.437392 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d954cfa-e3a2-4fc0-a1af-61234475db07-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2swmz\" (UID: \"2d954cfa-e3a2-4fc0-a1af-61234475db07\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2swmz" Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.437414 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2d954cfa-e3a2-4fc0-a1af-61234475db07-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2swmz\" (UID: \"2d954cfa-e3a2-4fc0-a1af-61234475db07\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2swmz" Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.437462 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2d954cfa-e3a2-4fc0-a1af-61234475db07-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2swmz\" (UID: \"2d954cfa-e3a2-4fc0-a1af-61234475db07\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2swmz" Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.437529 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92ltz\" (UniqueName: \"kubernetes.io/projected/2d954cfa-e3a2-4fc0-a1af-61234475db07-kube-api-access-92ltz\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2swmz\" (UID: \"2d954cfa-e3a2-4fc0-a1af-61234475db07\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2swmz" Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.438617 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/2d954cfa-e3a2-4fc0-a1af-61234475db07-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2swmz\" (UID: \"2d954cfa-e3a2-4fc0-a1af-61234475db07\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2swmz" Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.442030 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2d954cfa-e3a2-4fc0-a1af-61234475db07-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2swmz\" (UID: \"2d954cfa-e3a2-4fc0-a1af-61234475db07\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2swmz" Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.442285 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d954cfa-e3a2-4fc0-a1af-61234475db07-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2swmz\" (UID: \"2d954cfa-e3a2-4fc0-a1af-61234475db07\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2swmz" Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.442822 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2d954cfa-e3a2-4fc0-a1af-61234475db07-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2swmz\" (UID: \"2d954cfa-e3a2-4fc0-a1af-61234475db07\") " 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2swmz" Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.455935 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92ltz\" (UniqueName: \"kubernetes.io/projected/2d954cfa-e3a2-4fc0-a1af-61234475db07-kube-api-access-92ltz\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2swmz\" (UID: \"2d954cfa-e3a2-4fc0-a1af-61234475db07\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2swmz" Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.477535 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2swmz" Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.966786 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-2swmz"] Oct 03 09:11:35 crc kubenswrapper[4899]: I1003 09:11:35.972210 4899 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 09:11:36 crc kubenswrapper[4899]: I1003 09:11:36.085907 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2swmz" event={"ID":"2d954cfa-e3a2-4fc0-a1af-61234475db07","Type":"ContainerStarted","Data":"99d4086a4a89e3f4c97430b7067c3dacea9815f25c1fc319b179088ebb97caec"} Oct 03 09:11:37 crc kubenswrapper[4899]: I1003 09:11:37.097207 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2swmz" event={"ID":"2d954cfa-e3a2-4fc0-a1af-61234475db07","Type":"ContainerStarted","Data":"37cc1b726851503006288daa51a2013ade10f4fdd155095f0945e83b532d500d"} Oct 03 09:11:37 crc kubenswrapper[4899]: I1003 09:11:37.113340 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2swmz" podStartSLOduration=1.5915081450000002 podStartE2EDuration="2.113319268s" podCreationTimestamp="2025-10-03 09:11:35 +0000 UTC" firstStartedPulling="2025-10-03 09:11:35.971944576 +0000 UTC m=+1870.079429529" lastFinishedPulling="2025-10-03 09:11:36.493755699 +0000 UTC m=+1870.601240652" observedRunningTime="2025-10-03 09:11:37.112860294 +0000 UTC m=+1871.220345247" watchObservedRunningTime="2025-10-03 09:11:37.113319268 +0000 UTC m=+1871.220804221" Oct 03 09:12:36 crc kubenswrapper[4899]: I1003 09:12:36.583909 4899 generic.go:334] "Generic (PLEG): container finished" podID="2d954cfa-e3a2-4fc0-a1af-61234475db07" containerID="37cc1b726851503006288daa51a2013ade10f4fdd155095f0945e83b532d500d" exitCode=0 Oct 03 09:12:36 crc kubenswrapper[4899]: I1003 09:12:36.583960 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2swmz" event={"ID":"2d954cfa-e3a2-4fc0-a1af-61234475db07","Type":"ContainerDied","Data":"37cc1b726851503006288daa51a2013ade10f4fdd155095f0945e83b532d500d"} Oct 03 09:12:37 crc kubenswrapper[4899]: I1003 09:12:37.967951 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2swmz" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.043063 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/2d954cfa-e3a2-4fc0-a1af-61234475db07-ovncontroller-config-0\") pod \"2d954cfa-e3a2-4fc0-a1af-61234475db07\" (UID: \"2d954cfa-e3a2-4fc0-a1af-61234475db07\") " Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.043541 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d954cfa-e3a2-4fc0-a1af-61234475db07-ovn-combined-ca-bundle\") pod \"2d954cfa-e3a2-4fc0-a1af-61234475db07\" (UID: \"2d954cfa-e3a2-4fc0-a1af-61234475db07\") " Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.043596 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-92ltz\" (UniqueName: \"kubernetes.io/projected/2d954cfa-e3a2-4fc0-a1af-61234475db07-kube-api-access-92ltz\") pod \"2d954cfa-e3a2-4fc0-a1af-61234475db07\" (UID: \"2d954cfa-e3a2-4fc0-a1af-61234475db07\") " Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.043775 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2d954cfa-e3a2-4fc0-a1af-61234475db07-inventory\") pod \"2d954cfa-e3a2-4fc0-a1af-61234475db07\" (UID: \"2d954cfa-e3a2-4fc0-a1af-61234475db07\") " Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.043823 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2d954cfa-e3a2-4fc0-a1af-61234475db07-ssh-key\") pod \"2d954cfa-e3a2-4fc0-a1af-61234475db07\" (UID: \"2d954cfa-e3a2-4fc0-a1af-61234475db07\") " Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.048745 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d954cfa-e3a2-4fc0-a1af-61234475db07-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "2d954cfa-e3a2-4fc0-a1af-61234475db07" (UID: "2d954cfa-e3a2-4fc0-a1af-61234475db07"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.048833 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d954cfa-e3a2-4fc0-a1af-61234475db07-kube-api-access-92ltz" (OuterVolumeSpecName: "kube-api-access-92ltz") pod "2d954cfa-e3a2-4fc0-a1af-61234475db07" (UID: "2d954cfa-e3a2-4fc0-a1af-61234475db07"). InnerVolumeSpecName "kube-api-access-92ltz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.068126 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d954cfa-e3a2-4fc0-a1af-61234475db07-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "2d954cfa-e3a2-4fc0-a1af-61234475db07" (UID: "2d954cfa-e3a2-4fc0-a1af-61234475db07"). InnerVolumeSpecName "ovncontroller-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.070388 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d954cfa-e3a2-4fc0-a1af-61234475db07-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2d954cfa-e3a2-4fc0-a1af-61234475db07" (UID: "2d954cfa-e3a2-4fc0-a1af-61234475db07"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.071803 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d954cfa-e3a2-4fc0-a1af-61234475db07-inventory" (OuterVolumeSpecName: "inventory") pod "2d954cfa-e3a2-4fc0-a1af-61234475db07" (UID: "2d954cfa-e3a2-4fc0-a1af-61234475db07"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.146211 4899 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2d954cfa-e3a2-4fc0-a1af-61234475db07-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.146242 4899 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2d954cfa-e3a2-4fc0-a1af-61234475db07-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.146252 4899 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/2d954cfa-e3a2-4fc0-a1af-61234475db07-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.146262 4899 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d954cfa-e3a2-4fc0-a1af-61234475db07-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.146272 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-92ltz\" (UniqueName: \"kubernetes.io/projected/2d954cfa-e3a2-4fc0-a1af-61234475db07-kube-api-access-92ltz\") on node \"crc\" DevicePath \"\"" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.602636 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2swmz" event={"ID":"2d954cfa-e3a2-4fc0-a1af-61234475db07","Type":"ContainerDied","Data":"99d4086a4a89e3f4c97430b7067c3dacea9815f25c1fc319b179088ebb97caec"} Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.602678 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="99d4086a4a89e3f4c97430b7067c3dacea9815f25c1fc319b179088ebb97caec" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.602696 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2swmz" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.676820 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l"] Oct 03 09:12:38 crc kubenswrapper[4899]: E1003 09:12:38.677324 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d954cfa-e3a2-4fc0-a1af-61234475db07" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.677348 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d954cfa-e3a2-4fc0-a1af-61234475db07" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.677571 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d954cfa-e3a2-4fc0-a1af-61234475db07" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.678354 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.680877 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.681171 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.681486 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.681695 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.681827 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.682184 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pnmjv" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.699533 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l"] Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.758779 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l\" (UID: \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.758885 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6s8p\" (UniqueName: \"kubernetes.io/projected/9cf278a1-e80c-4739-9166-b75a8f6f3aea-kube-api-access-x6s8p\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l\" (UID: \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.758959 4899 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l\" (UID: \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.759204 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l\" (UID: \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.759391 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l\" (UID: \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.759452 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l\" (UID: \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.861494 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l\" (UID: \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.861540 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6s8p\" (UniqueName: \"kubernetes.io/projected/9cf278a1-e80c-4739-9166-b75a8f6f3aea-kube-api-access-x6s8p\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l\" (UID: \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.861573 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l\" (UID: \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.861632 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-ssh-key\") pod 
\"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l\" (UID: \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.861684 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l\" (UID: \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.861710 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l\" (UID: \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.866935 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l\" (UID: \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.866962 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l\" (UID: \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.867193 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l\" (UID: \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.868173 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l\" (UID: \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.868249 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l\" (UID: \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.878069 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-x6s8p\" (UniqueName: \"kubernetes.io/projected/9cf278a1-e80c-4739-9166-b75a8f6f3aea-kube-api-access-x6s8p\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l\" (UID: \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l" Oct 03 09:12:38 crc kubenswrapper[4899]: I1003 09:12:38.996528 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l" Oct 03 09:12:39 crc kubenswrapper[4899]: I1003 09:12:39.487568 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l"] Oct 03 09:12:39 crc kubenswrapper[4899]: W1003 09:12:39.492346 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9cf278a1_e80c_4739_9166_b75a8f6f3aea.slice/crio-42664711eb835692dfb18b0dcbfa1bba395b88126e85dbe96fa0fdc4d8c8ac49 WatchSource:0}: Error finding container 42664711eb835692dfb18b0dcbfa1bba395b88126e85dbe96fa0fdc4d8c8ac49: Status 404 returned error can't find the container with id 42664711eb835692dfb18b0dcbfa1bba395b88126e85dbe96fa0fdc4d8c8ac49 Oct 03 09:12:39 crc kubenswrapper[4899]: I1003 09:12:39.613622 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l" event={"ID":"9cf278a1-e80c-4739-9166-b75a8f6f3aea","Type":"ContainerStarted","Data":"42664711eb835692dfb18b0dcbfa1bba395b88126e85dbe96fa0fdc4d8c8ac49"} Oct 03 09:12:40 crc kubenswrapper[4899]: I1003 09:12:40.622927 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l" event={"ID":"9cf278a1-e80c-4739-9166-b75a8f6f3aea","Type":"ContainerStarted","Data":"8c79b6453507fcc76f95254c8a221654a636f14b9627d64645079891f7126a44"} Oct 03 09:12:40 crc kubenswrapper[4899]: I1003 09:12:40.642329 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l" podStartSLOduration=2.018025526 podStartE2EDuration="2.642308313s" podCreationTimestamp="2025-10-03 09:12:38 +0000 UTC" firstStartedPulling="2025-10-03 09:12:39.494419876 +0000 UTC m=+1933.601904829" lastFinishedPulling="2025-10-03 09:12:40.118702663 +0000 UTC m=+1934.226187616" observedRunningTime="2025-10-03 09:12:40.636293074 +0000 UTC m=+1934.743778027" watchObservedRunningTime="2025-10-03 09:12:40.642308313 +0000 UTC m=+1934.749793266" Oct 03 09:13:12 crc kubenswrapper[4899]: I1003 09:13:12.198611 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 09:13:12 crc kubenswrapper[4899]: I1003 09:13:12.199258 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 09:13:26 crc kubenswrapper[4899]: I1003 09:13:26.003616 4899 generic.go:334] "Generic (PLEG): container finished" podID="9cf278a1-e80c-4739-9166-b75a8f6f3aea" 
containerID="8c79b6453507fcc76f95254c8a221654a636f14b9627d64645079891f7126a44" exitCode=0 Oct 03 09:13:26 crc kubenswrapper[4899]: I1003 09:13:26.003690 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l" event={"ID":"9cf278a1-e80c-4739-9166-b75a8f6f3aea","Type":"ContainerDied","Data":"8c79b6453507fcc76f95254c8a221654a636f14b9627d64645079891f7126a44"} Oct 03 09:13:27 crc kubenswrapper[4899]: I1003 09:13:27.413419 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l" Oct 03 09:13:27 crc kubenswrapper[4899]: I1003 09:13:27.491125 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-neutron-metadata-combined-ca-bundle\") pod \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\" (UID: \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\") " Oct 03 09:13:27 crc kubenswrapper[4899]: I1003 09:13:27.491174 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-ssh-key\") pod \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\" (UID: \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\") " Oct 03 09:13:27 crc kubenswrapper[4899]: I1003 09:13:27.491196 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-inventory\") pod \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\" (UID: \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\") " Oct 03 09:13:27 crc kubenswrapper[4899]: I1003 09:13:27.491262 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-nova-metadata-neutron-config-0\") pod \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\" (UID: \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\") " Oct 03 09:13:27 crc kubenswrapper[4899]: I1003 09:13:27.491351 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-neutron-ovn-metadata-agent-neutron-config-0\") pod \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\" (UID: \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\") " Oct 03 09:13:27 crc kubenswrapper[4899]: I1003 09:13:27.491823 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6s8p\" (UniqueName: \"kubernetes.io/projected/9cf278a1-e80c-4739-9166-b75a8f6f3aea-kube-api-access-x6s8p\") pod \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\" (UID: \"9cf278a1-e80c-4739-9166-b75a8f6f3aea\") " Oct 03 09:13:27 crc kubenswrapper[4899]: I1003 09:13:27.496450 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "9cf278a1-e80c-4739-9166-b75a8f6f3aea" (UID: "9cf278a1-e80c-4739-9166-b75a8f6f3aea"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:13:27 crc kubenswrapper[4899]: I1003 09:13:27.497131 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9cf278a1-e80c-4739-9166-b75a8f6f3aea-kube-api-access-x6s8p" (OuterVolumeSpecName: "kube-api-access-x6s8p") pod "9cf278a1-e80c-4739-9166-b75a8f6f3aea" (UID: "9cf278a1-e80c-4739-9166-b75a8f6f3aea"). InnerVolumeSpecName "kube-api-access-x6s8p". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:13:27 crc kubenswrapper[4899]: I1003 09:13:27.518122 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "9cf278a1-e80c-4739-9166-b75a8f6f3aea" (UID: "9cf278a1-e80c-4739-9166-b75a8f6f3aea"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:13:27 crc kubenswrapper[4899]: I1003 09:13:27.521710 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-inventory" (OuterVolumeSpecName: "inventory") pod "9cf278a1-e80c-4739-9166-b75a8f6f3aea" (UID: "9cf278a1-e80c-4739-9166-b75a8f6f3aea"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:13:27 crc kubenswrapper[4899]: I1003 09:13:27.522286 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9cf278a1-e80c-4739-9166-b75a8f6f3aea" (UID: "9cf278a1-e80c-4739-9166-b75a8f6f3aea"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:13:27 crc kubenswrapper[4899]: I1003 09:13:27.528160 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "9cf278a1-e80c-4739-9166-b75a8f6f3aea" (UID: "9cf278a1-e80c-4739-9166-b75a8f6f3aea"). InnerVolumeSpecName "nova-metadata-neutron-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:13:27 crc kubenswrapper[4899]: I1003 09:13:27.595040 4899 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 09:13:27 crc kubenswrapper[4899]: I1003 09:13:27.595086 4899 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 09:13:27 crc kubenswrapper[4899]: I1003 09:13:27.595101 4899 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 09:13:27 crc kubenswrapper[4899]: I1003 09:13:27.595113 4899 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 03 09:13:27 crc kubenswrapper[4899]: I1003 09:13:27.595128 4899 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9cf278a1-e80c-4739-9166-b75a8f6f3aea-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 03 09:13:27 crc kubenswrapper[4899]: I1003 09:13:27.595145 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6s8p\" (UniqueName: \"kubernetes.io/projected/9cf278a1-e80c-4739-9166-b75a8f6f3aea-kube-api-access-x6s8p\") on node \"crc\" DevicePath \"\"" Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.035013 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l" event={"ID":"9cf278a1-e80c-4739-9166-b75a8f6f3aea","Type":"ContainerDied","Data":"42664711eb835692dfb18b0dcbfa1bba395b88126e85dbe96fa0fdc4d8c8ac49"} Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.035092 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="42664711eb835692dfb18b0dcbfa1bba395b88126e85dbe96fa0fdc4d8c8ac49" Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.035120 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l" Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.105807 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cb474"] Oct 03 09:13:28 crc kubenswrapper[4899]: E1003 09:13:28.106530 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cf278a1-e80c-4739-9166-b75a8f6f3aea" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.106547 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cf278a1-e80c-4739-9166-b75a8f6f3aea" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.106726 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cf278a1-e80c-4739-9166-b75a8f6f3aea" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.107508 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cb474" Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.109101 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.109795 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.110092 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pnmjv" Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.110209 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.110374 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.119564 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cb474"] Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.205107 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d9001fcb-add1-41c8-9638-097229339246-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cb474\" (UID: \"d9001fcb-add1-41c8-9638-097229339246\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cb474" Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.205168 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d9001fcb-add1-41c8-9638-097229339246-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cb474\" (UID: \"d9001fcb-add1-41c8-9638-097229339246\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cb474" Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.205203 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9001fcb-add1-41c8-9638-097229339246-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cb474\" (UID: \"d9001fcb-add1-41c8-9638-097229339246\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cb474" Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.205245 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gs7l\" (UniqueName: \"kubernetes.io/projected/d9001fcb-add1-41c8-9638-097229339246-kube-api-access-5gs7l\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cb474\" (UID: \"d9001fcb-add1-41c8-9638-097229339246\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cb474" Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.205287 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d9001fcb-add1-41c8-9638-097229339246-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cb474\" (UID: \"d9001fcb-add1-41c8-9638-097229339246\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cb474" Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.306576 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-5gs7l\" (UniqueName: \"kubernetes.io/projected/d9001fcb-add1-41c8-9638-097229339246-kube-api-access-5gs7l\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cb474\" (UID: \"d9001fcb-add1-41c8-9638-097229339246\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cb474" Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.306643 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d9001fcb-add1-41c8-9638-097229339246-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cb474\" (UID: \"d9001fcb-add1-41c8-9638-097229339246\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cb474" Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.306740 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d9001fcb-add1-41c8-9638-097229339246-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cb474\" (UID: \"d9001fcb-add1-41c8-9638-097229339246\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cb474" Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.306769 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d9001fcb-add1-41c8-9638-097229339246-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cb474\" (UID: \"d9001fcb-add1-41c8-9638-097229339246\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cb474" Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.306802 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9001fcb-add1-41c8-9638-097229339246-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cb474\" (UID: \"d9001fcb-add1-41c8-9638-097229339246\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cb474" Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.310796 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d9001fcb-add1-41c8-9638-097229339246-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cb474\" (UID: \"d9001fcb-add1-41c8-9638-097229339246\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cb474" Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.310850 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d9001fcb-add1-41c8-9638-097229339246-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cb474\" (UID: \"d9001fcb-add1-41c8-9638-097229339246\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cb474" Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.311590 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9001fcb-add1-41c8-9638-097229339246-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cb474\" (UID: \"d9001fcb-add1-41c8-9638-097229339246\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cb474" Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.312117 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d9001fcb-add1-41c8-9638-097229339246-inventory\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-cb474\" (UID: \"d9001fcb-add1-41c8-9638-097229339246\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cb474" Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.325447 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gs7l\" (UniqueName: \"kubernetes.io/projected/d9001fcb-add1-41c8-9638-097229339246-kube-api-access-5gs7l\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cb474\" (UID: \"d9001fcb-add1-41c8-9638-097229339246\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cb474" Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.423240 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cb474" Oct 03 09:13:28 crc kubenswrapper[4899]: I1003 09:13:28.930168 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cb474"] Oct 03 09:13:29 crc kubenswrapper[4899]: I1003 09:13:29.083248 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cb474" event={"ID":"d9001fcb-add1-41c8-9638-097229339246","Type":"ContainerStarted","Data":"1a00fdbfc841d7957e7888505e85c9b9e93977f5c493b59c34e7e4780cbf0e98"} Oct 03 09:13:30 crc kubenswrapper[4899]: I1003 09:13:30.095874 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cb474" event={"ID":"d9001fcb-add1-41c8-9638-097229339246","Type":"ContainerStarted","Data":"f1c74955aad972357d9ed2abb328681c5806afd86ec953f4b209f8121bd8ec58"} Oct 03 09:13:30 crc kubenswrapper[4899]: I1003 09:13:30.116270 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cb474" podStartSLOduration=1.598227655 podStartE2EDuration="2.116255378s" podCreationTimestamp="2025-10-03 09:13:28 +0000 UTC" firstStartedPulling="2025-10-03 09:13:28.934006826 +0000 UTC m=+1983.041491779" lastFinishedPulling="2025-10-03 09:13:29.452034549 +0000 UTC m=+1983.559519502" observedRunningTime="2025-10-03 09:13:30.113183071 +0000 UTC m=+1984.220668054" watchObservedRunningTime="2025-10-03 09:13:30.116255378 +0000 UTC m=+1984.223740321" Oct 03 09:13:42 crc kubenswrapper[4899]: I1003 09:13:42.198190 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 09:13:42 crc kubenswrapper[4899]: I1003 09:13:42.198906 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 09:14:12 crc kubenswrapper[4899]: I1003 09:14:12.198180 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 09:14:12 crc kubenswrapper[4899]: I1003 09:14:12.198761 4899 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 09:14:12 crc kubenswrapper[4899]: I1003 09:14:12.198821 4899 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 09:14:12 crc kubenswrapper[4899]: I1003 09:14:12.199698 4899 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b2b34b8d69d69b7d2982e1866f37318d87ef851d9135ccb09c02fa5d8cd572f5"} pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 09:14:12 crc kubenswrapper[4899]: I1003 09:14:12.199756 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" containerID="cri-o://b2b34b8d69d69b7d2982e1866f37318d87ef851d9135ccb09c02fa5d8cd572f5" gracePeriod=600 Oct 03 09:14:12 crc kubenswrapper[4899]: I1003 09:14:12.470113 4899 generic.go:334] "Generic (PLEG): container finished" podID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerID="b2b34b8d69d69b7d2982e1866f37318d87ef851d9135ccb09c02fa5d8cd572f5" exitCode=0 Oct 03 09:14:12 crc kubenswrapper[4899]: I1003 09:14:12.470154 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerDied","Data":"b2b34b8d69d69b7d2982e1866f37318d87ef851d9135ccb09c02fa5d8cd572f5"} Oct 03 09:14:12 crc kubenswrapper[4899]: I1003 09:14:12.470185 4899 scope.go:117] "RemoveContainer" containerID="7d57a032b965867b4bac5617a40db55d85f094c7a67bc0287016c7e2a2fe743b" Oct 03 09:14:13 crc kubenswrapper[4899]: I1003 09:14:13.479935 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerStarted","Data":"ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27"} Oct 03 09:14:35 crc kubenswrapper[4899]: I1003 09:14:35.085131 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xw5wc"] Oct 03 09:14:35 crc kubenswrapper[4899]: I1003 09:14:35.087623 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xw5wc" Oct 03 09:14:35 crc kubenswrapper[4899]: I1003 09:14:35.094704 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9qjc\" (UniqueName: \"kubernetes.io/projected/7acc767f-da7a-4e39-90e2-f504c5b40827-kube-api-access-g9qjc\") pod \"community-operators-xw5wc\" (UID: \"7acc767f-da7a-4e39-90e2-f504c5b40827\") " pod="openshift-marketplace/community-operators-xw5wc" Oct 03 09:14:35 crc kubenswrapper[4899]: I1003 09:14:35.094799 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7acc767f-da7a-4e39-90e2-f504c5b40827-catalog-content\") pod \"community-operators-xw5wc\" (UID: \"7acc767f-da7a-4e39-90e2-f504c5b40827\") " pod="openshift-marketplace/community-operators-xw5wc" Oct 03 09:14:35 crc kubenswrapper[4899]: I1003 09:14:35.094926 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7acc767f-da7a-4e39-90e2-f504c5b40827-utilities\") pod \"community-operators-xw5wc\" (UID: \"7acc767f-da7a-4e39-90e2-f504c5b40827\") " pod="openshift-marketplace/community-operators-xw5wc" Oct 03 09:14:35 crc kubenswrapper[4899]: I1003 09:14:35.100094 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xw5wc"] Oct 03 09:14:35 crc kubenswrapper[4899]: I1003 09:14:35.197009 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7acc767f-da7a-4e39-90e2-f504c5b40827-catalog-content\") pod \"community-operators-xw5wc\" (UID: \"7acc767f-da7a-4e39-90e2-f504c5b40827\") " pod="openshift-marketplace/community-operators-xw5wc" Oct 03 09:14:35 crc kubenswrapper[4899]: I1003 09:14:35.197178 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7acc767f-da7a-4e39-90e2-f504c5b40827-utilities\") pod \"community-operators-xw5wc\" (UID: \"7acc767f-da7a-4e39-90e2-f504c5b40827\") " pod="openshift-marketplace/community-operators-xw5wc" Oct 03 09:14:35 crc kubenswrapper[4899]: I1003 09:14:35.197248 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9qjc\" (UniqueName: \"kubernetes.io/projected/7acc767f-da7a-4e39-90e2-f504c5b40827-kube-api-access-g9qjc\") pod \"community-operators-xw5wc\" (UID: \"7acc767f-da7a-4e39-90e2-f504c5b40827\") " pod="openshift-marketplace/community-operators-xw5wc" Oct 03 09:14:35 crc kubenswrapper[4899]: I1003 09:14:35.198273 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7acc767f-da7a-4e39-90e2-f504c5b40827-utilities\") pod \"community-operators-xw5wc\" (UID: \"7acc767f-da7a-4e39-90e2-f504c5b40827\") " pod="openshift-marketplace/community-operators-xw5wc" Oct 03 09:14:35 crc kubenswrapper[4899]: I1003 09:14:35.198415 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7acc767f-da7a-4e39-90e2-f504c5b40827-catalog-content\") pod \"community-operators-xw5wc\" (UID: \"7acc767f-da7a-4e39-90e2-f504c5b40827\") " pod="openshift-marketplace/community-operators-xw5wc" Oct 03 09:14:35 crc kubenswrapper[4899]: I1003 09:14:35.224295 4899 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-g9qjc\" (UniqueName: \"kubernetes.io/projected/7acc767f-da7a-4e39-90e2-f504c5b40827-kube-api-access-g9qjc\") pod \"community-operators-xw5wc\" (UID: \"7acc767f-da7a-4e39-90e2-f504c5b40827\") " pod="openshift-marketplace/community-operators-xw5wc" Oct 03 09:14:35 crc kubenswrapper[4899]: I1003 09:14:35.408914 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xw5wc" Oct 03 09:14:35 crc kubenswrapper[4899]: I1003 09:14:35.964926 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xw5wc"] Oct 03 09:14:36 crc kubenswrapper[4899]: I1003 09:14:36.679366 4899 generic.go:334] "Generic (PLEG): container finished" podID="7acc767f-da7a-4e39-90e2-f504c5b40827" containerID="8f44afbd2322077f7b9613ca55c2f22fffdc7d2f5356290cb62155c3d66e27e3" exitCode=0 Oct 03 09:14:36 crc kubenswrapper[4899]: I1003 09:14:36.679433 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xw5wc" event={"ID":"7acc767f-da7a-4e39-90e2-f504c5b40827","Type":"ContainerDied","Data":"8f44afbd2322077f7b9613ca55c2f22fffdc7d2f5356290cb62155c3d66e27e3"} Oct 03 09:14:36 crc kubenswrapper[4899]: I1003 09:14:36.679636 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xw5wc" event={"ID":"7acc767f-da7a-4e39-90e2-f504c5b40827","Type":"ContainerStarted","Data":"66952f3c404ded84f2510fcd022b6b1b8435dd756efb8c9a394164845f822db8"} Oct 03 09:14:40 crc kubenswrapper[4899]: I1003 09:14:40.716790 4899 generic.go:334] "Generic (PLEG): container finished" podID="7acc767f-da7a-4e39-90e2-f504c5b40827" containerID="68f3ff157334b6d49ec8c02d013da669d850f8c766558ddb25d1f94c83042eb1" exitCode=0 Oct 03 09:14:40 crc kubenswrapper[4899]: I1003 09:14:40.717470 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xw5wc" event={"ID":"7acc767f-da7a-4e39-90e2-f504c5b40827","Type":"ContainerDied","Data":"68f3ff157334b6d49ec8c02d013da669d850f8c766558ddb25d1f94c83042eb1"} Oct 03 09:14:41 crc kubenswrapper[4899]: I1003 09:14:41.727687 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xw5wc" event={"ID":"7acc767f-da7a-4e39-90e2-f504c5b40827","Type":"ContainerStarted","Data":"ea264bba41e589c233410c1ff6016da9f0366c603d867fc8b04bf3b7e23dfe4e"} Oct 03 09:14:41 crc kubenswrapper[4899]: I1003 09:14:41.755652 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xw5wc" podStartSLOduration=2.268112103 podStartE2EDuration="6.755632088s" podCreationTimestamp="2025-10-03 09:14:35 +0000 UTC" firstStartedPulling="2025-10-03 09:14:36.681919505 +0000 UTC m=+2050.789404458" lastFinishedPulling="2025-10-03 09:14:41.16943949 +0000 UTC m=+2055.276924443" observedRunningTime="2025-10-03 09:14:41.747137155 +0000 UTC m=+2055.854622108" watchObservedRunningTime="2025-10-03 09:14:41.755632088 +0000 UTC m=+2055.863117041" Oct 03 09:14:45 crc kubenswrapper[4899]: I1003 09:14:45.409310 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xw5wc" Oct 03 09:14:45 crc kubenswrapper[4899]: I1003 09:14:45.412132 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xw5wc" Oct 03 09:14:45 crc kubenswrapper[4899]: I1003 09:14:45.457572 4899 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xw5wc" Oct 03 09:14:55 crc kubenswrapper[4899]: I1003 09:14:55.455465 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xw5wc" Oct 03 09:14:55 crc kubenswrapper[4899]: I1003 09:14:55.525079 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xw5wc"] Oct 03 09:14:55 crc kubenswrapper[4899]: I1003 09:14:55.583463 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cpks6"] Oct 03 09:14:55 crc kubenswrapper[4899]: I1003 09:14:55.583739 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-cpks6" podUID="2c0c4dce-aba0-4bf6-95bf-a513344e0740" containerName="registry-server" containerID="cri-o://72abfbac36734d4caa0cab4735f19cd856c8b24fea2a2eb319606390defcc618" gracePeriod=2 Oct 03 09:14:55 crc kubenswrapper[4899]: I1003 09:14:55.874012 4899 generic.go:334] "Generic (PLEG): container finished" podID="2c0c4dce-aba0-4bf6-95bf-a513344e0740" containerID="72abfbac36734d4caa0cab4735f19cd856c8b24fea2a2eb319606390defcc618" exitCode=0 Oct 03 09:14:55 crc kubenswrapper[4899]: I1003 09:14:55.875177 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cpks6" event={"ID":"2c0c4dce-aba0-4bf6-95bf-a513344e0740","Type":"ContainerDied","Data":"72abfbac36734d4caa0cab4735f19cd856c8b24fea2a2eb319606390defcc618"} Oct 03 09:14:56 crc kubenswrapper[4899]: I1003 09:14:56.038790 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cpks6" Oct 03 09:14:56 crc kubenswrapper[4899]: I1003 09:14:56.110702 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c0c4dce-aba0-4bf6-95bf-a513344e0740-utilities\") pod \"2c0c4dce-aba0-4bf6-95bf-a513344e0740\" (UID: \"2c0c4dce-aba0-4bf6-95bf-a513344e0740\") " Oct 03 09:14:56 crc kubenswrapper[4899]: I1003 09:14:56.110764 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c0c4dce-aba0-4bf6-95bf-a513344e0740-catalog-content\") pod \"2c0c4dce-aba0-4bf6-95bf-a513344e0740\" (UID: \"2c0c4dce-aba0-4bf6-95bf-a513344e0740\") " Oct 03 09:14:56 crc kubenswrapper[4899]: I1003 09:14:56.110967 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bvcbz\" (UniqueName: \"kubernetes.io/projected/2c0c4dce-aba0-4bf6-95bf-a513344e0740-kube-api-access-bvcbz\") pod \"2c0c4dce-aba0-4bf6-95bf-a513344e0740\" (UID: \"2c0c4dce-aba0-4bf6-95bf-a513344e0740\") " Oct 03 09:14:56 crc kubenswrapper[4899]: I1003 09:14:56.111356 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c0c4dce-aba0-4bf6-95bf-a513344e0740-utilities" (OuterVolumeSpecName: "utilities") pod "2c0c4dce-aba0-4bf6-95bf-a513344e0740" (UID: "2c0c4dce-aba0-4bf6-95bf-a513344e0740"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:14:56 crc kubenswrapper[4899]: I1003 09:14:56.121097 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c0c4dce-aba0-4bf6-95bf-a513344e0740-kube-api-access-bvcbz" (OuterVolumeSpecName: "kube-api-access-bvcbz") pod "2c0c4dce-aba0-4bf6-95bf-a513344e0740" (UID: "2c0c4dce-aba0-4bf6-95bf-a513344e0740"). InnerVolumeSpecName "kube-api-access-bvcbz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:14:56 crc kubenswrapper[4899]: I1003 09:14:56.155211 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c0c4dce-aba0-4bf6-95bf-a513344e0740-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2c0c4dce-aba0-4bf6-95bf-a513344e0740" (UID: "2c0c4dce-aba0-4bf6-95bf-a513344e0740"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:14:56 crc kubenswrapper[4899]: I1003 09:14:56.212546 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bvcbz\" (UniqueName: \"kubernetes.io/projected/2c0c4dce-aba0-4bf6-95bf-a513344e0740-kube-api-access-bvcbz\") on node \"crc\" DevicePath \"\"" Oct 03 09:14:56 crc kubenswrapper[4899]: I1003 09:14:56.212583 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c0c4dce-aba0-4bf6-95bf-a513344e0740-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 09:14:56 crc kubenswrapper[4899]: I1003 09:14:56.212596 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c0c4dce-aba0-4bf6-95bf-a513344e0740-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 09:14:56 crc kubenswrapper[4899]: I1003 09:14:56.886437 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cpks6" event={"ID":"2c0c4dce-aba0-4bf6-95bf-a513344e0740","Type":"ContainerDied","Data":"56fed69288c864275088a1ba487171c04c9fcc20fb7fb56212c338cced01c057"} Oct 03 09:14:56 crc kubenswrapper[4899]: I1003 09:14:56.886682 4899 scope.go:117] "RemoveContainer" containerID="72abfbac36734d4caa0cab4735f19cd856c8b24fea2a2eb319606390defcc618" Oct 03 09:14:56 crc kubenswrapper[4899]: I1003 09:14:56.886478 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cpks6" Oct 03 09:14:56 crc kubenswrapper[4899]: I1003 09:14:56.910012 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cpks6"] Oct 03 09:14:56 crc kubenswrapper[4899]: I1003 09:14:56.920165 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-cpks6"] Oct 03 09:14:56 crc kubenswrapper[4899]: I1003 09:14:56.922595 4899 scope.go:117] "RemoveContainer" containerID="fa1a80a43bb844ba17351623dd4eca03e34f2e05fb38156df5784386719236d4" Oct 03 09:14:56 crc kubenswrapper[4899]: I1003 09:14:56.971543 4899 scope.go:117] "RemoveContainer" containerID="e170abda9552febfafb84ba9f2a689f2d17f17ee0a71beae2ab438fb9fff54c2" Oct 03 09:14:58 crc kubenswrapper[4899]: I1003 09:14:58.537672 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c0c4dce-aba0-4bf6-95bf-a513344e0740" path="/var/lib/kubelet/pods/2c0c4dce-aba0-4bf6-95bf-a513344e0740/volumes" Oct 03 09:15:00 crc kubenswrapper[4899]: I1003 09:15:00.150473 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324715-fnmvr"] Oct 03 09:15:00 crc kubenswrapper[4899]: E1003 09:15:00.151081 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c0c4dce-aba0-4bf6-95bf-a513344e0740" containerName="extract-content" Oct 03 09:15:00 crc kubenswrapper[4899]: I1003 09:15:00.151101 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c0c4dce-aba0-4bf6-95bf-a513344e0740" containerName="extract-content" Oct 03 09:15:00 crc kubenswrapper[4899]: E1003 09:15:00.151144 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c0c4dce-aba0-4bf6-95bf-a513344e0740" containerName="registry-server" Oct 03 09:15:00 crc kubenswrapper[4899]: I1003 09:15:00.151153 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c0c4dce-aba0-4bf6-95bf-a513344e0740" containerName="registry-server" Oct 03 09:15:00 crc kubenswrapper[4899]: E1003 09:15:00.151177 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c0c4dce-aba0-4bf6-95bf-a513344e0740" containerName="extract-utilities" Oct 03 09:15:00 crc kubenswrapper[4899]: I1003 09:15:00.151207 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c0c4dce-aba0-4bf6-95bf-a513344e0740" containerName="extract-utilities" Oct 03 09:15:00 crc kubenswrapper[4899]: I1003 09:15:00.151482 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c0c4dce-aba0-4bf6-95bf-a513344e0740" containerName="registry-server" Oct 03 09:15:00 crc kubenswrapper[4899]: I1003 09:15:00.152375 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324715-fnmvr" Oct 03 09:15:00 crc kubenswrapper[4899]: I1003 09:15:00.156137 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 03 09:15:00 crc kubenswrapper[4899]: I1003 09:15:00.156181 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 03 09:15:00 crc kubenswrapper[4899]: I1003 09:15:00.206328 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324715-fnmvr"] Oct 03 09:15:00 crc kubenswrapper[4899]: I1003 09:15:00.310031 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7e9c2a19-1030-443e-adc2-e003dc150280-secret-volume\") pod \"collect-profiles-29324715-fnmvr\" (UID: \"7e9c2a19-1030-443e-adc2-e003dc150280\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324715-fnmvr" Oct 03 09:15:00 crc kubenswrapper[4899]: I1003 09:15:00.310994 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7e9c2a19-1030-443e-adc2-e003dc150280-config-volume\") pod \"collect-profiles-29324715-fnmvr\" (UID: \"7e9c2a19-1030-443e-adc2-e003dc150280\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324715-fnmvr" Oct 03 09:15:00 crc kubenswrapper[4899]: I1003 09:15:00.311243 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b85nr\" (UniqueName: \"kubernetes.io/projected/7e9c2a19-1030-443e-adc2-e003dc150280-kube-api-access-b85nr\") pod \"collect-profiles-29324715-fnmvr\" (UID: \"7e9c2a19-1030-443e-adc2-e003dc150280\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324715-fnmvr" Oct 03 09:15:00 crc kubenswrapper[4899]: I1003 09:15:00.412972 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7e9c2a19-1030-443e-adc2-e003dc150280-secret-volume\") pod \"collect-profiles-29324715-fnmvr\" (UID: \"7e9c2a19-1030-443e-adc2-e003dc150280\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324715-fnmvr" Oct 03 09:15:00 crc kubenswrapper[4899]: I1003 09:15:00.413025 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7e9c2a19-1030-443e-adc2-e003dc150280-config-volume\") pod \"collect-profiles-29324715-fnmvr\" (UID: \"7e9c2a19-1030-443e-adc2-e003dc150280\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324715-fnmvr" Oct 03 09:15:00 crc kubenswrapper[4899]: I1003 09:15:00.413076 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b85nr\" (UniqueName: \"kubernetes.io/projected/7e9c2a19-1030-443e-adc2-e003dc150280-kube-api-access-b85nr\") pod \"collect-profiles-29324715-fnmvr\" (UID: \"7e9c2a19-1030-443e-adc2-e003dc150280\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324715-fnmvr" Oct 03 09:15:00 crc kubenswrapper[4899]: I1003 09:15:00.414340 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7e9c2a19-1030-443e-adc2-e003dc150280-config-volume\") pod 
\"collect-profiles-29324715-fnmvr\" (UID: \"7e9c2a19-1030-443e-adc2-e003dc150280\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324715-fnmvr" Oct 03 09:15:00 crc kubenswrapper[4899]: I1003 09:15:00.418616 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7e9c2a19-1030-443e-adc2-e003dc150280-secret-volume\") pod \"collect-profiles-29324715-fnmvr\" (UID: \"7e9c2a19-1030-443e-adc2-e003dc150280\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324715-fnmvr" Oct 03 09:15:00 crc kubenswrapper[4899]: I1003 09:15:00.428675 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b85nr\" (UniqueName: \"kubernetes.io/projected/7e9c2a19-1030-443e-adc2-e003dc150280-kube-api-access-b85nr\") pod \"collect-profiles-29324715-fnmvr\" (UID: \"7e9c2a19-1030-443e-adc2-e003dc150280\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324715-fnmvr" Oct 03 09:15:00 crc kubenswrapper[4899]: I1003 09:15:00.528788 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324715-fnmvr" Oct 03 09:15:00 crc kubenswrapper[4899]: I1003 09:15:00.987648 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324715-fnmvr"] Oct 03 09:15:00 crc kubenswrapper[4899]: W1003 09:15:00.991276 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7e9c2a19_1030_443e_adc2_e003dc150280.slice/crio-46bada10f4f27c6adc7f9d0f1bd728fd569fa86f459cecadc2c26333977f34ea WatchSource:0}: Error finding container 46bada10f4f27c6adc7f9d0f1bd728fd569fa86f459cecadc2c26333977f34ea: Status 404 returned error can't find the container with id 46bada10f4f27c6adc7f9d0f1bd728fd569fa86f459cecadc2c26333977f34ea Oct 03 09:15:01 crc kubenswrapper[4899]: I1003 09:15:01.933600 4899 generic.go:334] "Generic (PLEG): container finished" podID="7e9c2a19-1030-443e-adc2-e003dc150280" containerID="77f3d19ac4d2528a458721e07308fb50b76a337bacab408b8a781929c5cfa2b7" exitCode=0 Oct 03 09:15:01 crc kubenswrapper[4899]: I1003 09:15:01.933705 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324715-fnmvr" event={"ID":"7e9c2a19-1030-443e-adc2-e003dc150280","Type":"ContainerDied","Data":"77f3d19ac4d2528a458721e07308fb50b76a337bacab408b8a781929c5cfa2b7"} Oct 03 09:15:01 crc kubenswrapper[4899]: I1003 09:15:01.934004 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324715-fnmvr" event={"ID":"7e9c2a19-1030-443e-adc2-e003dc150280","Type":"ContainerStarted","Data":"46bada10f4f27c6adc7f9d0f1bd728fd569fa86f459cecadc2c26333977f34ea"} Oct 03 09:15:03 crc kubenswrapper[4899]: I1003 09:15:03.282003 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324715-fnmvr" Oct 03 09:15:03 crc kubenswrapper[4899]: I1003 09:15:03.363435 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7e9c2a19-1030-443e-adc2-e003dc150280-secret-volume\") pod \"7e9c2a19-1030-443e-adc2-e003dc150280\" (UID: \"7e9c2a19-1030-443e-adc2-e003dc150280\") " Oct 03 09:15:03 crc kubenswrapper[4899]: I1003 09:15:03.363912 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7e9c2a19-1030-443e-adc2-e003dc150280-config-volume\") pod \"7e9c2a19-1030-443e-adc2-e003dc150280\" (UID: \"7e9c2a19-1030-443e-adc2-e003dc150280\") " Oct 03 09:15:03 crc kubenswrapper[4899]: I1003 09:15:03.364070 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b85nr\" (UniqueName: \"kubernetes.io/projected/7e9c2a19-1030-443e-adc2-e003dc150280-kube-api-access-b85nr\") pod \"7e9c2a19-1030-443e-adc2-e003dc150280\" (UID: \"7e9c2a19-1030-443e-adc2-e003dc150280\") " Oct 03 09:15:03 crc kubenswrapper[4899]: I1003 09:15:03.364546 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e9c2a19-1030-443e-adc2-e003dc150280-config-volume" (OuterVolumeSpecName: "config-volume") pod "7e9c2a19-1030-443e-adc2-e003dc150280" (UID: "7e9c2a19-1030-443e-adc2-e003dc150280"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 09:15:03 crc kubenswrapper[4899]: I1003 09:15:03.369851 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e9c2a19-1030-443e-adc2-e003dc150280-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "7e9c2a19-1030-443e-adc2-e003dc150280" (UID: "7e9c2a19-1030-443e-adc2-e003dc150280"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:15:03 crc kubenswrapper[4899]: I1003 09:15:03.372109 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e9c2a19-1030-443e-adc2-e003dc150280-kube-api-access-b85nr" (OuterVolumeSpecName: "kube-api-access-b85nr") pod "7e9c2a19-1030-443e-adc2-e003dc150280" (UID: "7e9c2a19-1030-443e-adc2-e003dc150280"). InnerVolumeSpecName "kube-api-access-b85nr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:15:03 crc kubenswrapper[4899]: I1003 09:15:03.465874 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b85nr\" (UniqueName: \"kubernetes.io/projected/7e9c2a19-1030-443e-adc2-e003dc150280-kube-api-access-b85nr\") on node \"crc\" DevicePath \"\"" Oct 03 09:15:03 crc kubenswrapper[4899]: I1003 09:15:03.465925 4899 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7e9c2a19-1030-443e-adc2-e003dc150280-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 03 09:15:03 crc kubenswrapper[4899]: I1003 09:15:03.465936 4899 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7e9c2a19-1030-443e-adc2-e003dc150280-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 09:15:03 crc kubenswrapper[4899]: I1003 09:15:03.953193 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324715-fnmvr" event={"ID":"7e9c2a19-1030-443e-adc2-e003dc150280","Type":"ContainerDied","Data":"46bada10f4f27c6adc7f9d0f1bd728fd569fa86f459cecadc2c26333977f34ea"} Oct 03 09:15:03 crc kubenswrapper[4899]: I1003 09:15:03.953238 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="46bada10f4f27c6adc7f9d0f1bd728fd569fa86f459cecadc2c26333977f34ea" Oct 03 09:15:03 crc kubenswrapper[4899]: I1003 09:15:03.953298 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324715-fnmvr" Oct 03 09:15:04 crc kubenswrapper[4899]: I1003 09:15:04.349199 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324670-2j7rr"] Oct 03 09:15:04 crc kubenswrapper[4899]: I1003 09:15:04.356641 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324670-2j7rr"] Oct 03 09:15:04 crc kubenswrapper[4899]: I1003 09:15:04.536977 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="264bb2e1-d946-4b30-9aa8-48cb9a9447e5" path="/var/lib/kubelet/pods/264bb2e1-d946-4b30-9aa8-48cb9a9447e5/volumes" Oct 03 09:15:19 crc kubenswrapper[4899]: I1003 09:15:19.978629 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-g5mht"] Oct 03 09:15:19 crc kubenswrapper[4899]: E1003 09:15:19.979597 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e9c2a19-1030-443e-adc2-e003dc150280" containerName="collect-profiles" Oct 03 09:15:19 crc kubenswrapper[4899]: I1003 09:15:19.979611 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e9c2a19-1030-443e-adc2-e003dc150280" containerName="collect-profiles" Oct 03 09:15:19 crc kubenswrapper[4899]: I1003 09:15:19.979850 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e9c2a19-1030-443e-adc2-e003dc150280" containerName="collect-profiles" Oct 03 09:15:19 crc kubenswrapper[4899]: I1003 09:15:19.981939 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g5mht" Oct 03 09:15:19 crc kubenswrapper[4899]: I1003 09:15:19.991034 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g5mht"] Oct 03 09:15:20 crc kubenswrapper[4899]: I1003 09:15:20.087982 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b4e2c59-fa55-4afd-bf59-a7648e679591-catalog-content\") pod \"redhat-marketplace-g5mht\" (UID: \"7b4e2c59-fa55-4afd-bf59-a7648e679591\") " pod="openshift-marketplace/redhat-marketplace-g5mht" Oct 03 09:15:20 crc kubenswrapper[4899]: I1003 09:15:20.088056 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b4e2c59-fa55-4afd-bf59-a7648e679591-utilities\") pod \"redhat-marketplace-g5mht\" (UID: \"7b4e2c59-fa55-4afd-bf59-a7648e679591\") " pod="openshift-marketplace/redhat-marketplace-g5mht" Oct 03 09:15:20 crc kubenswrapper[4899]: I1003 09:15:20.088289 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tjg4\" (UniqueName: \"kubernetes.io/projected/7b4e2c59-fa55-4afd-bf59-a7648e679591-kube-api-access-7tjg4\") pod \"redhat-marketplace-g5mht\" (UID: \"7b4e2c59-fa55-4afd-bf59-a7648e679591\") " pod="openshift-marketplace/redhat-marketplace-g5mht" Oct 03 09:15:20 crc kubenswrapper[4899]: I1003 09:15:20.190702 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tjg4\" (UniqueName: \"kubernetes.io/projected/7b4e2c59-fa55-4afd-bf59-a7648e679591-kube-api-access-7tjg4\") pod \"redhat-marketplace-g5mht\" (UID: \"7b4e2c59-fa55-4afd-bf59-a7648e679591\") " pod="openshift-marketplace/redhat-marketplace-g5mht" Oct 03 09:15:20 crc kubenswrapper[4899]: I1003 09:15:20.191257 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b4e2c59-fa55-4afd-bf59-a7648e679591-catalog-content\") pod \"redhat-marketplace-g5mht\" (UID: \"7b4e2c59-fa55-4afd-bf59-a7648e679591\") " pod="openshift-marketplace/redhat-marketplace-g5mht" Oct 03 09:15:20 crc kubenswrapper[4899]: I1003 09:15:20.191685 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b4e2c59-fa55-4afd-bf59-a7648e679591-catalog-content\") pod \"redhat-marketplace-g5mht\" (UID: \"7b4e2c59-fa55-4afd-bf59-a7648e679591\") " pod="openshift-marketplace/redhat-marketplace-g5mht" Oct 03 09:15:20 crc kubenswrapper[4899]: I1003 09:15:20.191765 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b4e2c59-fa55-4afd-bf59-a7648e679591-utilities\") pod \"redhat-marketplace-g5mht\" (UID: \"7b4e2c59-fa55-4afd-bf59-a7648e679591\") " pod="openshift-marketplace/redhat-marketplace-g5mht" Oct 03 09:15:20 crc kubenswrapper[4899]: I1003 09:15:20.192039 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b4e2c59-fa55-4afd-bf59-a7648e679591-utilities\") pod \"redhat-marketplace-g5mht\" (UID: \"7b4e2c59-fa55-4afd-bf59-a7648e679591\") " pod="openshift-marketplace/redhat-marketplace-g5mht" Oct 03 09:15:20 crc kubenswrapper[4899]: I1003 09:15:20.212828 4899 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-7tjg4\" (UniqueName: \"kubernetes.io/projected/7b4e2c59-fa55-4afd-bf59-a7648e679591-kube-api-access-7tjg4\") pod \"redhat-marketplace-g5mht\" (UID: \"7b4e2c59-fa55-4afd-bf59-a7648e679591\") " pod="openshift-marketplace/redhat-marketplace-g5mht" Oct 03 09:15:20 crc kubenswrapper[4899]: I1003 09:15:20.301853 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g5mht" Oct 03 09:15:20 crc kubenswrapper[4899]: I1003 09:15:20.764675 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g5mht"] Oct 03 09:15:21 crc kubenswrapper[4899]: I1003 09:15:21.090216 4899 generic.go:334] "Generic (PLEG): container finished" podID="7b4e2c59-fa55-4afd-bf59-a7648e679591" containerID="ff65494511b95007fbb1bf934dcb247f03f03a1bbdfc7458df66e8baec5bf704" exitCode=0 Oct 03 09:15:21 crc kubenswrapper[4899]: I1003 09:15:21.090258 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g5mht" event={"ID":"7b4e2c59-fa55-4afd-bf59-a7648e679591","Type":"ContainerDied","Data":"ff65494511b95007fbb1bf934dcb247f03f03a1bbdfc7458df66e8baec5bf704"} Oct 03 09:15:21 crc kubenswrapper[4899]: I1003 09:15:21.090291 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g5mht" event={"ID":"7b4e2c59-fa55-4afd-bf59-a7648e679591","Type":"ContainerStarted","Data":"fc6ba3cbe4ae33d30f2c784d7cc6d019b2a4341f1f9111934923a0cf3b472a7a"} Oct 03 09:15:22 crc kubenswrapper[4899]: I1003 09:15:22.100709 4899 generic.go:334] "Generic (PLEG): container finished" podID="7b4e2c59-fa55-4afd-bf59-a7648e679591" containerID="6273886a4d58a970c03c4db61bccb3b0a44820705fcfb9458fec61ddacbe9eb3" exitCode=0 Oct 03 09:15:22 crc kubenswrapper[4899]: I1003 09:15:22.100765 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g5mht" event={"ID":"7b4e2c59-fa55-4afd-bf59-a7648e679591","Type":"ContainerDied","Data":"6273886a4d58a970c03c4db61bccb3b0a44820705fcfb9458fec61ddacbe9eb3"} Oct 03 09:15:23 crc kubenswrapper[4899]: I1003 09:15:23.112799 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g5mht" event={"ID":"7b4e2c59-fa55-4afd-bf59-a7648e679591","Type":"ContainerStarted","Data":"b7d1c0cb1f7fd2b96192b2af8a951d2b9fffd7fc4babf521b9f4db77e013f47d"} Oct 03 09:15:23 crc kubenswrapper[4899]: I1003 09:15:23.133438 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-g5mht" podStartSLOduration=2.574814991 podStartE2EDuration="4.133422959s" podCreationTimestamp="2025-10-03 09:15:19 +0000 UTC" firstStartedPulling="2025-10-03 09:15:21.091674318 +0000 UTC m=+2095.199159271" lastFinishedPulling="2025-10-03 09:15:22.650282286 +0000 UTC m=+2096.757767239" observedRunningTime="2025-10-03 09:15:23.129631687 +0000 UTC m=+2097.237116640" watchObservedRunningTime="2025-10-03 09:15:23.133422959 +0000 UTC m=+2097.240907912" Oct 03 09:15:30 crc kubenswrapper[4899]: I1003 09:15:30.302529 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-g5mht" Oct 03 09:15:30 crc kubenswrapper[4899]: I1003 09:15:30.303376 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-g5mht" Oct 03 09:15:30 crc kubenswrapper[4899]: I1003 09:15:30.348436 4899 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-g5mht" Oct 03 09:15:31 crc kubenswrapper[4899]: I1003 09:15:31.226259 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-g5mht" Oct 03 09:15:31 crc kubenswrapper[4899]: I1003 09:15:31.283810 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g5mht"] Oct 03 09:15:33 crc kubenswrapper[4899]: I1003 09:15:33.198068 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-g5mht" podUID="7b4e2c59-fa55-4afd-bf59-a7648e679591" containerName="registry-server" containerID="cri-o://b7d1c0cb1f7fd2b96192b2af8a951d2b9fffd7fc4babf521b9f4db77e013f47d" gracePeriod=2 Oct 03 09:15:33 crc kubenswrapper[4899]: I1003 09:15:33.308872 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fdt7h"] Oct 03 09:15:33 crc kubenswrapper[4899]: I1003 09:15:33.312377 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fdt7h" Oct 03 09:15:33 crc kubenswrapper[4899]: I1003 09:15:33.319825 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fdt7h"] Oct 03 09:15:33 crc kubenswrapper[4899]: I1003 09:15:33.463924 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/375e78f5-be3a-4196-b4d2-ed13af00e588-catalog-content\") pod \"certified-operators-fdt7h\" (UID: \"375e78f5-be3a-4196-b4d2-ed13af00e588\") " pod="openshift-marketplace/certified-operators-fdt7h" Oct 03 09:15:33 crc kubenswrapper[4899]: I1003 09:15:33.463989 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/375e78f5-be3a-4196-b4d2-ed13af00e588-utilities\") pod \"certified-operators-fdt7h\" (UID: \"375e78f5-be3a-4196-b4d2-ed13af00e588\") " pod="openshift-marketplace/certified-operators-fdt7h" Oct 03 09:15:33 crc kubenswrapper[4899]: I1003 09:15:33.464020 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfdr5\" (UniqueName: \"kubernetes.io/projected/375e78f5-be3a-4196-b4d2-ed13af00e588-kube-api-access-zfdr5\") pod \"certified-operators-fdt7h\" (UID: \"375e78f5-be3a-4196-b4d2-ed13af00e588\") " pod="openshift-marketplace/certified-operators-fdt7h" Oct 03 09:15:33 crc kubenswrapper[4899]: I1003 09:15:33.566344 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/375e78f5-be3a-4196-b4d2-ed13af00e588-catalog-content\") pod \"certified-operators-fdt7h\" (UID: \"375e78f5-be3a-4196-b4d2-ed13af00e588\") " pod="openshift-marketplace/certified-operators-fdt7h" Oct 03 09:15:33 crc kubenswrapper[4899]: I1003 09:15:33.566828 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/375e78f5-be3a-4196-b4d2-ed13af00e588-utilities\") pod \"certified-operators-fdt7h\" (UID: \"375e78f5-be3a-4196-b4d2-ed13af00e588\") " pod="openshift-marketplace/certified-operators-fdt7h" Oct 03 09:15:33 crc kubenswrapper[4899]: I1003 09:15:33.566858 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-zfdr5\" (UniqueName: \"kubernetes.io/projected/375e78f5-be3a-4196-b4d2-ed13af00e588-kube-api-access-zfdr5\") pod \"certified-operators-fdt7h\" (UID: \"375e78f5-be3a-4196-b4d2-ed13af00e588\") " pod="openshift-marketplace/certified-operators-fdt7h" Oct 03 09:15:33 crc kubenswrapper[4899]: I1003 09:15:33.566961 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/375e78f5-be3a-4196-b4d2-ed13af00e588-catalog-content\") pod \"certified-operators-fdt7h\" (UID: \"375e78f5-be3a-4196-b4d2-ed13af00e588\") " pod="openshift-marketplace/certified-operators-fdt7h" Oct 03 09:15:33 crc kubenswrapper[4899]: I1003 09:15:33.567350 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/375e78f5-be3a-4196-b4d2-ed13af00e588-utilities\") pod \"certified-operators-fdt7h\" (UID: \"375e78f5-be3a-4196-b4d2-ed13af00e588\") " pod="openshift-marketplace/certified-operators-fdt7h" Oct 03 09:15:33 crc kubenswrapper[4899]: I1003 09:15:33.595240 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfdr5\" (UniqueName: \"kubernetes.io/projected/375e78f5-be3a-4196-b4d2-ed13af00e588-kube-api-access-zfdr5\") pod \"certified-operators-fdt7h\" (UID: \"375e78f5-be3a-4196-b4d2-ed13af00e588\") " pod="openshift-marketplace/certified-operators-fdt7h" Oct 03 09:15:33 crc kubenswrapper[4899]: I1003 09:15:33.674397 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fdt7h" Oct 03 09:15:33 crc kubenswrapper[4899]: I1003 09:15:33.684014 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g5mht" Oct 03 09:15:33 crc kubenswrapper[4899]: I1003 09:15:33.772811 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b4e2c59-fa55-4afd-bf59-a7648e679591-catalog-content\") pod \"7b4e2c59-fa55-4afd-bf59-a7648e679591\" (UID: \"7b4e2c59-fa55-4afd-bf59-a7648e679591\") " Oct 03 09:15:33 crc kubenswrapper[4899]: I1003 09:15:33.772868 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7tjg4\" (UniqueName: \"kubernetes.io/projected/7b4e2c59-fa55-4afd-bf59-a7648e679591-kube-api-access-7tjg4\") pod \"7b4e2c59-fa55-4afd-bf59-a7648e679591\" (UID: \"7b4e2c59-fa55-4afd-bf59-a7648e679591\") " Oct 03 09:15:33 crc kubenswrapper[4899]: I1003 09:15:33.772995 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b4e2c59-fa55-4afd-bf59-a7648e679591-utilities\") pod \"7b4e2c59-fa55-4afd-bf59-a7648e679591\" (UID: \"7b4e2c59-fa55-4afd-bf59-a7648e679591\") " Oct 03 09:15:33 crc kubenswrapper[4899]: I1003 09:15:33.774149 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b4e2c59-fa55-4afd-bf59-a7648e679591-utilities" (OuterVolumeSpecName: "utilities") pod "7b4e2c59-fa55-4afd-bf59-a7648e679591" (UID: "7b4e2c59-fa55-4afd-bf59-a7648e679591"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:15:33 crc kubenswrapper[4899]: I1003 09:15:33.779127 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b4e2c59-fa55-4afd-bf59-a7648e679591-kube-api-access-7tjg4" (OuterVolumeSpecName: "kube-api-access-7tjg4") pod "7b4e2c59-fa55-4afd-bf59-a7648e679591" (UID: "7b4e2c59-fa55-4afd-bf59-a7648e679591"). InnerVolumeSpecName "kube-api-access-7tjg4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:15:33 crc kubenswrapper[4899]: I1003 09:15:33.796450 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b4e2c59-fa55-4afd-bf59-a7648e679591-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7b4e2c59-fa55-4afd-bf59-a7648e679591" (UID: "7b4e2c59-fa55-4afd-bf59-a7648e679591"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:15:33 crc kubenswrapper[4899]: I1003 09:15:33.875331 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b4e2c59-fa55-4afd-bf59-a7648e679591-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 09:15:33 crc kubenswrapper[4899]: I1003 09:15:33.875592 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7tjg4\" (UniqueName: \"kubernetes.io/projected/7b4e2c59-fa55-4afd-bf59-a7648e679591-kube-api-access-7tjg4\") on node \"crc\" DevicePath \"\"" Oct 03 09:15:33 crc kubenswrapper[4899]: I1003 09:15:33.875685 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b4e2c59-fa55-4afd-bf59-a7648e679591-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 09:15:34 crc kubenswrapper[4899]: I1003 09:15:34.207247 4899 generic.go:334] "Generic (PLEG): container finished" podID="7b4e2c59-fa55-4afd-bf59-a7648e679591" containerID="b7d1c0cb1f7fd2b96192b2af8a951d2b9fffd7fc4babf521b9f4db77e013f47d" exitCode=0 Oct 03 09:15:34 crc kubenswrapper[4899]: I1003 09:15:34.207567 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g5mht" event={"ID":"7b4e2c59-fa55-4afd-bf59-a7648e679591","Type":"ContainerDied","Data":"b7d1c0cb1f7fd2b96192b2af8a951d2b9fffd7fc4babf521b9f4db77e013f47d"} Oct 03 09:15:34 crc kubenswrapper[4899]: I1003 09:15:34.207596 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g5mht" event={"ID":"7b4e2c59-fa55-4afd-bf59-a7648e679591","Type":"ContainerDied","Data":"fc6ba3cbe4ae33d30f2c784d7cc6d019b2a4341f1f9111934923a0cf3b472a7a"} Oct 03 09:15:34 crc kubenswrapper[4899]: I1003 09:15:34.207614 4899 scope.go:117] "RemoveContainer" containerID="b7d1c0cb1f7fd2b96192b2af8a951d2b9fffd7fc4babf521b9f4db77e013f47d" Oct 03 09:15:34 crc kubenswrapper[4899]: I1003 09:15:34.207729 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g5mht" Oct 03 09:15:34 crc kubenswrapper[4899]: I1003 09:15:34.228311 4899 scope.go:117] "RemoveContainer" containerID="6273886a4d58a970c03c4db61bccb3b0a44820705fcfb9458fec61ddacbe9eb3" Oct 03 09:15:34 crc kubenswrapper[4899]: I1003 09:15:34.241017 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g5mht"] Oct 03 09:15:34 crc kubenswrapper[4899]: I1003 09:15:34.249726 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-g5mht"] Oct 03 09:15:34 crc kubenswrapper[4899]: I1003 09:15:34.266563 4899 scope.go:117] "RemoveContainer" containerID="ff65494511b95007fbb1bf934dcb247f03f03a1bbdfc7458df66e8baec5bf704" Oct 03 09:15:34 crc kubenswrapper[4899]: I1003 09:15:34.268700 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fdt7h"] Oct 03 09:15:34 crc kubenswrapper[4899]: W1003 09:15:34.272040 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod375e78f5_be3a_4196_b4d2_ed13af00e588.slice/crio-050c520c738a84622d943a9e0606dfcbcac67626b5911f20d3b45009868015fb WatchSource:0}: Error finding container 050c520c738a84622d943a9e0606dfcbcac67626b5911f20d3b45009868015fb: Status 404 returned error can't find the container with id 050c520c738a84622d943a9e0606dfcbcac67626b5911f20d3b45009868015fb Oct 03 09:15:34 crc kubenswrapper[4899]: I1003 09:15:34.284613 4899 scope.go:117] "RemoveContainer" containerID="b7d1c0cb1f7fd2b96192b2af8a951d2b9fffd7fc4babf521b9f4db77e013f47d" Oct 03 09:15:34 crc kubenswrapper[4899]: E1003 09:15:34.284917 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7d1c0cb1f7fd2b96192b2af8a951d2b9fffd7fc4babf521b9f4db77e013f47d\": container with ID starting with b7d1c0cb1f7fd2b96192b2af8a951d2b9fffd7fc4babf521b9f4db77e013f47d not found: ID does not exist" containerID="b7d1c0cb1f7fd2b96192b2af8a951d2b9fffd7fc4babf521b9f4db77e013f47d" Oct 03 09:15:34 crc kubenswrapper[4899]: I1003 09:15:34.284946 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7d1c0cb1f7fd2b96192b2af8a951d2b9fffd7fc4babf521b9f4db77e013f47d"} err="failed to get container status \"b7d1c0cb1f7fd2b96192b2af8a951d2b9fffd7fc4babf521b9f4db77e013f47d\": rpc error: code = NotFound desc = could not find container \"b7d1c0cb1f7fd2b96192b2af8a951d2b9fffd7fc4babf521b9f4db77e013f47d\": container with ID starting with b7d1c0cb1f7fd2b96192b2af8a951d2b9fffd7fc4babf521b9f4db77e013f47d not found: ID does not exist" Oct 03 09:15:34 crc kubenswrapper[4899]: I1003 09:15:34.284967 4899 scope.go:117] "RemoveContainer" containerID="6273886a4d58a970c03c4db61bccb3b0a44820705fcfb9458fec61ddacbe9eb3" Oct 03 09:15:34 crc kubenswrapper[4899]: E1003 09:15:34.285253 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6273886a4d58a970c03c4db61bccb3b0a44820705fcfb9458fec61ddacbe9eb3\": container with ID starting with 6273886a4d58a970c03c4db61bccb3b0a44820705fcfb9458fec61ddacbe9eb3 not found: ID does not exist" containerID="6273886a4d58a970c03c4db61bccb3b0a44820705fcfb9458fec61ddacbe9eb3" Oct 03 09:15:34 crc kubenswrapper[4899]: I1003 09:15:34.285320 4899 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"6273886a4d58a970c03c4db61bccb3b0a44820705fcfb9458fec61ddacbe9eb3"} err="failed to get container status \"6273886a4d58a970c03c4db61bccb3b0a44820705fcfb9458fec61ddacbe9eb3\": rpc error: code = NotFound desc = could not find container \"6273886a4d58a970c03c4db61bccb3b0a44820705fcfb9458fec61ddacbe9eb3\": container with ID starting with 6273886a4d58a970c03c4db61bccb3b0a44820705fcfb9458fec61ddacbe9eb3 not found: ID does not exist" Oct 03 09:15:34 crc kubenswrapper[4899]: I1003 09:15:34.285357 4899 scope.go:117] "RemoveContainer" containerID="ff65494511b95007fbb1bf934dcb247f03f03a1bbdfc7458df66e8baec5bf704" Oct 03 09:15:34 crc kubenswrapper[4899]: E1003 09:15:34.285632 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff65494511b95007fbb1bf934dcb247f03f03a1bbdfc7458df66e8baec5bf704\": container with ID starting with ff65494511b95007fbb1bf934dcb247f03f03a1bbdfc7458df66e8baec5bf704 not found: ID does not exist" containerID="ff65494511b95007fbb1bf934dcb247f03f03a1bbdfc7458df66e8baec5bf704" Oct 03 09:15:34 crc kubenswrapper[4899]: I1003 09:15:34.285678 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff65494511b95007fbb1bf934dcb247f03f03a1bbdfc7458df66e8baec5bf704"} err="failed to get container status \"ff65494511b95007fbb1bf934dcb247f03f03a1bbdfc7458df66e8baec5bf704\": rpc error: code = NotFound desc = could not find container \"ff65494511b95007fbb1bf934dcb247f03f03a1bbdfc7458df66e8baec5bf704\": container with ID starting with ff65494511b95007fbb1bf934dcb247f03f03a1bbdfc7458df66e8baec5bf704 not found: ID does not exist" Oct 03 09:15:34 crc kubenswrapper[4899]: I1003 09:15:34.539870 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b4e2c59-fa55-4afd-bf59-a7648e679591" path="/var/lib/kubelet/pods/7b4e2c59-fa55-4afd-bf59-a7648e679591/volumes" Oct 03 09:15:35 crc kubenswrapper[4899]: I1003 09:15:35.220947 4899 generic.go:334] "Generic (PLEG): container finished" podID="375e78f5-be3a-4196-b4d2-ed13af00e588" containerID="e6984666b8f9bde7fc3e383ee47496eac9d1f6852b66824dbbfa92476cf9a02d" exitCode=0 Oct 03 09:15:35 crc kubenswrapper[4899]: I1003 09:15:35.221009 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fdt7h" event={"ID":"375e78f5-be3a-4196-b4d2-ed13af00e588","Type":"ContainerDied","Data":"e6984666b8f9bde7fc3e383ee47496eac9d1f6852b66824dbbfa92476cf9a02d"} Oct 03 09:15:35 crc kubenswrapper[4899]: I1003 09:15:35.221580 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fdt7h" event={"ID":"375e78f5-be3a-4196-b4d2-ed13af00e588","Type":"ContainerStarted","Data":"050c520c738a84622d943a9e0606dfcbcac67626b5911f20d3b45009868015fb"} Oct 03 09:15:36 crc kubenswrapper[4899]: I1003 09:15:36.235147 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fdt7h" event={"ID":"375e78f5-be3a-4196-b4d2-ed13af00e588","Type":"ContainerDied","Data":"7dadf8e2f7741cf56ae7d66ad25eac613423e544745a15e9651840a8bc7f9c21"} Oct 03 09:15:36 crc kubenswrapper[4899]: I1003 09:15:36.235174 4899 generic.go:334] "Generic (PLEG): container finished" podID="375e78f5-be3a-4196-b4d2-ed13af00e588" containerID="7dadf8e2f7741cf56ae7d66ad25eac613423e544745a15e9651840a8bc7f9c21" exitCode=0 Oct 03 09:15:37 crc kubenswrapper[4899]: I1003 09:15:37.245485 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-fdt7h" event={"ID":"375e78f5-be3a-4196-b4d2-ed13af00e588","Type":"ContainerStarted","Data":"6814d0e379f048ff40b1c868927a2fe3912f677359d58bdebf6732c37c408aad"} Oct 03 09:15:37 crc kubenswrapper[4899]: I1003 09:15:37.286152 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fdt7h" podStartSLOduration=2.652759317 podStartE2EDuration="4.286134515s" podCreationTimestamp="2025-10-03 09:15:33 +0000 UTC" firstStartedPulling="2025-10-03 09:15:35.22398722 +0000 UTC m=+2109.331472173" lastFinishedPulling="2025-10-03 09:15:36.857362418 +0000 UTC m=+2110.964847371" observedRunningTime="2025-10-03 09:15:37.280325429 +0000 UTC m=+2111.387810382" watchObservedRunningTime="2025-10-03 09:15:37.286134515 +0000 UTC m=+2111.393619468" Oct 03 09:15:37 crc kubenswrapper[4899]: I1003 09:15:37.988512 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pp9zj"] Oct 03 09:15:37 crc kubenswrapper[4899]: E1003 09:15:37.988920 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b4e2c59-fa55-4afd-bf59-a7648e679591" containerName="extract-content" Oct 03 09:15:37 crc kubenswrapper[4899]: I1003 09:15:37.988937 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b4e2c59-fa55-4afd-bf59-a7648e679591" containerName="extract-content" Oct 03 09:15:37 crc kubenswrapper[4899]: E1003 09:15:37.988956 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b4e2c59-fa55-4afd-bf59-a7648e679591" containerName="registry-server" Oct 03 09:15:37 crc kubenswrapper[4899]: I1003 09:15:37.988963 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b4e2c59-fa55-4afd-bf59-a7648e679591" containerName="registry-server" Oct 03 09:15:37 crc kubenswrapper[4899]: E1003 09:15:37.988988 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b4e2c59-fa55-4afd-bf59-a7648e679591" containerName="extract-utilities" Oct 03 09:15:37 crc kubenswrapper[4899]: I1003 09:15:37.988994 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b4e2c59-fa55-4afd-bf59-a7648e679591" containerName="extract-utilities" Oct 03 09:15:37 crc kubenswrapper[4899]: I1003 09:15:37.989179 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b4e2c59-fa55-4afd-bf59-a7648e679591" containerName="registry-server" Oct 03 09:15:37 crc kubenswrapper[4899]: I1003 09:15:37.990589 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pp9zj" Oct 03 09:15:38 crc kubenswrapper[4899]: I1003 09:15:38.003623 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pp9zj"] Oct 03 09:15:38 crc kubenswrapper[4899]: I1003 09:15:38.145645 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b44af5e6-263c-42bf-a316-572253523719-catalog-content\") pod \"redhat-operators-pp9zj\" (UID: \"b44af5e6-263c-42bf-a316-572253523719\") " pod="openshift-marketplace/redhat-operators-pp9zj" Oct 03 09:15:38 crc kubenswrapper[4899]: I1003 09:15:38.145738 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w88b9\" (UniqueName: \"kubernetes.io/projected/b44af5e6-263c-42bf-a316-572253523719-kube-api-access-w88b9\") pod \"redhat-operators-pp9zj\" (UID: \"b44af5e6-263c-42bf-a316-572253523719\") " pod="openshift-marketplace/redhat-operators-pp9zj" Oct 03 09:15:38 crc kubenswrapper[4899]: I1003 09:15:38.145771 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b44af5e6-263c-42bf-a316-572253523719-utilities\") pod \"redhat-operators-pp9zj\" (UID: \"b44af5e6-263c-42bf-a316-572253523719\") " pod="openshift-marketplace/redhat-operators-pp9zj" Oct 03 09:15:38 crc kubenswrapper[4899]: I1003 09:15:38.247463 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b44af5e6-263c-42bf-a316-572253523719-catalog-content\") pod \"redhat-operators-pp9zj\" (UID: \"b44af5e6-263c-42bf-a316-572253523719\") " pod="openshift-marketplace/redhat-operators-pp9zj" Oct 03 09:15:38 crc kubenswrapper[4899]: I1003 09:15:38.247545 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w88b9\" (UniqueName: \"kubernetes.io/projected/b44af5e6-263c-42bf-a316-572253523719-kube-api-access-w88b9\") pod \"redhat-operators-pp9zj\" (UID: \"b44af5e6-263c-42bf-a316-572253523719\") " pod="openshift-marketplace/redhat-operators-pp9zj" Oct 03 09:15:38 crc kubenswrapper[4899]: I1003 09:15:38.247573 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b44af5e6-263c-42bf-a316-572253523719-utilities\") pod \"redhat-operators-pp9zj\" (UID: \"b44af5e6-263c-42bf-a316-572253523719\") " pod="openshift-marketplace/redhat-operators-pp9zj" Oct 03 09:15:38 crc kubenswrapper[4899]: I1003 09:15:38.248410 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b44af5e6-263c-42bf-a316-572253523719-utilities\") pod \"redhat-operators-pp9zj\" (UID: \"b44af5e6-263c-42bf-a316-572253523719\") " pod="openshift-marketplace/redhat-operators-pp9zj" Oct 03 09:15:38 crc kubenswrapper[4899]: I1003 09:15:38.248460 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b44af5e6-263c-42bf-a316-572253523719-catalog-content\") pod \"redhat-operators-pp9zj\" (UID: \"b44af5e6-263c-42bf-a316-572253523719\") " pod="openshift-marketplace/redhat-operators-pp9zj" Oct 03 09:15:38 crc kubenswrapper[4899]: I1003 09:15:38.278670 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-w88b9\" (UniqueName: \"kubernetes.io/projected/b44af5e6-263c-42bf-a316-572253523719-kube-api-access-w88b9\") pod \"redhat-operators-pp9zj\" (UID: \"b44af5e6-263c-42bf-a316-572253523719\") " pod="openshift-marketplace/redhat-operators-pp9zj" Oct 03 09:15:38 crc kubenswrapper[4899]: I1003 09:15:38.306014 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pp9zj" Oct 03 09:15:38 crc kubenswrapper[4899]: I1003 09:15:38.746753 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pp9zj"] Oct 03 09:15:39 crc kubenswrapper[4899]: I1003 09:15:39.264734 4899 generic.go:334] "Generic (PLEG): container finished" podID="b44af5e6-263c-42bf-a316-572253523719" containerID="89e9dc6dc866c4e4af178cfae42de437a981a8bb816e497213d60b68e7a640b3" exitCode=0 Oct 03 09:15:39 crc kubenswrapper[4899]: I1003 09:15:39.264776 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pp9zj" event={"ID":"b44af5e6-263c-42bf-a316-572253523719","Type":"ContainerDied","Data":"89e9dc6dc866c4e4af178cfae42de437a981a8bb816e497213d60b68e7a640b3"} Oct 03 09:15:39 crc kubenswrapper[4899]: I1003 09:15:39.264802 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pp9zj" event={"ID":"b44af5e6-263c-42bf-a316-572253523719","Type":"ContainerStarted","Data":"e5b363f3ac644603b0707d76cf98503196c77c3086385f12fa14854c60d4a92e"} Oct 03 09:15:41 crc kubenswrapper[4899]: I1003 09:15:41.283017 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pp9zj" event={"ID":"b44af5e6-263c-42bf-a316-572253523719","Type":"ContainerStarted","Data":"484cbc85e4f1f7f995e771c9e0e4cb6c84db4a6e78d621d28a362e7671672878"} Oct 03 09:15:43 crc kubenswrapper[4899]: I1003 09:15:43.304761 4899 generic.go:334] "Generic (PLEG): container finished" podID="b44af5e6-263c-42bf-a316-572253523719" containerID="484cbc85e4f1f7f995e771c9e0e4cb6c84db4a6e78d621d28a362e7671672878" exitCode=0 Oct 03 09:15:43 crc kubenswrapper[4899]: I1003 09:15:43.304881 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pp9zj" event={"ID":"b44af5e6-263c-42bf-a316-572253523719","Type":"ContainerDied","Data":"484cbc85e4f1f7f995e771c9e0e4cb6c84db4a6e78d621d28a362e7671672878"} Oct 03 09:15:43 crc kubenswrapper[4899]: I1003 09:15:43.676176 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fdt7h" Oct 03 09:15:43 crc kubenswrapper[4899]: I1003 09:15:43.676259 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fdt7h" Oct 03 09:15:43 crc kubenswrapper[4899]: I1003 09:15:43.731442 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fdt7h" Oct 03 09:15:44 crc kubenswrapper[4899]: I1003 09:15:44.316061 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pp9zj" event={"ID":"b44af5e6-263c-42bf-a316-572253523719","Type":"ContainerStarted","Data":"1db0a7c0f0ca0d5f39f3d0b9b6eaa8cc6a4e2fd676e0066a40915c1640673d40"} Oct 03 09:15:44 crc kubenswrapper[4899]: I1003 09:15:44.335225 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pp9zj" podStartSLOduration=2.784710564 podStartE2EDuration="7.335208669s" 
podCreationTimestamp="2025-10-03 09:15:37 +0000 UTC" firstStartedPulling="2025-10-03 09:15:39.267089195 +0000 UTC m=+2113.374574148" lastFinishedPulling="2025-10-03 09:15:43.8175873 +0000 UTC m=+2117.925072253" observedRunningTime="2025-10-03 09:15:44.332148401 +0000 UTC m=+2118.439633354" watchObservedRunningTime="2025-10-03 09:15:44.335208669 +0000 UTC m=+2118.442693622" Oct 03 09:15:44 crc kubenswrapper[4899]: I1003 09:15:44.365046 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fdt7h" Oct 03 09:15:46 crc kubenswrapper[4899]: I1003 09:15:46.380312 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fdt7h"] Oct 03 09:15:46 crc kubenswrapper[4899]: I1003 09:15:46.380530 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fdt7h" podUID="375e78f5-be3a-4196-b4d2-ed13af00e588" containerName="registry-server" containerID="cri-o://6814d0e379f048ff40b1c868927a2fe3912f677359d58bdebf6732c37c408aad" gracePeriod=2 Oct 03 09:15:46 crc kubenswrapper[4899]: I1003 09:15:46.815530 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fdt7h" Oct 03 09:15:47 crc kubenswrapper[4899]: I1003 09:15:47.015746 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/375e78f5-be3a-4196-b4d2-ed13af00e588-utilities\") pod \"375e78f5-be3a-4196-b4d2-ed13af00e588\" (UID: \"375e78f5-be3a-4196-b4d2-ed13af00e588\") " Oct 03 09:15:47 crc kubenswrapper[4899]: I1003 09:15:47.016916 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zfdr5\" (UniqueName: \"kubernetes.io/projected/375e78f5-be3a-4196-b4d2-ed13af00e588-kube-api-access-zfdr5\") pod \"375e78f5-be3a-4196-b4d2-ed13af00e588\" (UID: \"375e78f5-be3a-4196-b4d2-ed13af00e588\") " Oct 03 09:15:47 crc kubenswrapper[4899]: I1003 09:15:47.017778 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/375e78f5-be3a-4196-b4d2-ed13af00e588-catalog-content\") pod \"375e78f5-be3a-4196-b4d2-ed13af00e588\" (UID: \"375e78f5-be3a-4196-b4d2-ed13af00e588\") " Oct 03 09:15:47 crc kubenswrapper[4899]: I1003 09:15:47.016849 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/375e78f5-be3a-4196-b4d2-ed13af00e588-utilities" (OuterVolumeSpecName: "utilities") pod "375e78f5-be3a-4196-b4d2-ed13af00e588" (UID: "375e78f5-be3a-4196-b4d2-ed13af00e588"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:15:47 crc kubenswrapper[4899]: I1003 09:15:47.018649 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/375e78f5-be3a-4196-b4d2-ed13af00e588-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 09:15:47 crc kubenswrapper[4899]: I1003 09:15:47.022151 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/375e78f5-be3a-4196-b4d2-ed13af00e588-kube-api-access-zfdr5" (OuterVolumeSpecName: "kube-api-access-zfdr5") pod "375e78f5-be3a-4196-b4d2-ed13af00e588" (UID: "375e78f5-be3a-4196-b4d2-ed13af00e588"). InnerVolumeSpecName "kube-api-access-zfdr5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:15:47 crc kubenswrapper[4899]: I1003 09:15:47.060271 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/375e78f5-be3a-4196-b4d2-ed13af00e588-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "375e78f5-be3a-4196-b4d2-ed13af00e588" (UID: "375e78f5-be3a-4196-b4d2-ed13af00e588"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:15:47 crc kubenswrapper[4899]: I1003 09:15:47.121993 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/375e78f5-be3a-4196-b4d2-ed13af00e588-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 09:15:47 crc kubenswrapper[4899]: I1003 09:15:47.123722 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zfdr5\" (UniqueName: \"kubernetes.io/projected/375e78f5-be3a-4196-b4d2-ed13af00e588-kube-api-access-zfdr5\") on node \"crc\" DevicePath \"\"" Oct 03 09:15:47 crc kubenswrapper[4899]: I1003 09:15:47.341349 4899 generic.go:334] "Generic (PLEG): container finished" podID="375e78f5-be3a-4196-b4d2-ed13af00e588" containerID="6814d0e379f048ff40b1c868927a2fe3912f677359d58bdebf6732c37c408aad" exitCode=0 Oct 03 09:15:47 crc kubenswrapper[4899]: I1003 09:15:47.341397 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fdt7h" event={"ID":"375e78f5-be3a-4196-b4d2-ed13af00e588","Type":"ContainerDied","Data":"6814d0e379f048ff40b1c868927a2fe3912f677359d58bdebf6732c37c408aad"} Oct 03 09:15:47 crc kubenswrapper[4899]: I1003 09:15:47.341431 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fdt7h" event={"ID":"375e78f5-be3a-4196-b4d2-ed13af00e588","Type":"ContainerDied","Data":"050c520c738a84622d943a9e0606dfcbcac67626b5911f20d3b45009868015fb"} Oct 03 09:15:47 crc kubenswrapper[4899]: I1003 09:15:47.341452 4899 scope.go:117] "RemoveContainer" containerID="6814d0e379f048ff40b1c868927a2fe3912f677359d58bdebf6732c37c408aad" Oct 03 09:15:47 crc kubenswrapper[4899]: I1003 09:15:47.341454 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fdt7h" Oct 03 09:15:47 crc kubenswrapper[4899]: I1003 09:15:47.393651 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fdt7h"] Oct 03 09:15:47 crc kubenswrapper[4899]: I1003 09:15:47.395419 4899 scope.go:117] "RemoveContainer" containerID="7dadf8e2f7741cf56ae7d66ad25eac613423e544745a15e9651840a8bc7f9c21" Oct 03 09:15:47 crc kubenswrapper[4899]: I1003 09:15:47.402130 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-fdt7h"] Oct 03 09:15:47 crc kubenswrapper[4899]: I1003 09:15:47.428054 4899 scope.go:117] "RemoveContainer" containerID="e6984666b8f9bde7fc3e383ee47496eac9d1f6852b66824dbbfa92476cf9a02d" Oct 03 09:15:47 crc kubenswrapper[4899]: I1003 09:15:47.479241 4899 scope.go:117] "RemoveContainer" containerID="6814d0e379f048ff40b1c868927a2fe3912f677359d58bdebf6732c37c408aad" Oct 03 09:15:47 crc kubenswrapper[4899]: E1003 09:15:47.479967 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6814d0e379f048ff40b1c868927a2fe3912f677359d58bdebf6732c37c408aad\": container with ID starting with 6814d0e379f048ff40b1c868927a2fe3912f677359d58bdebf6732c37c408aad not found: ID does not exist" containerID="6814d0e379f048ff40b1c868927a2fe3912f677359d58bdebf6732c37c408aad" Oct 03 09:15:47 crc kubenswrapper[4899]: I1003 09:15:47.480006 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6814d0e379f048ff40b1c868927a2fe3912f677359d58bdebf6732c37c408aad"} err="failed to get container status \"6814d0e379f048ff40b1c868927a2fe3912f677359d58bdebf6732c37c408aad\": rpc error: code = NotFound desc = could not find container \"6814d0e379f048ff40b1c868927a2fe3912f677359d58bdebf6732c37c408aad\": container with ID starting with 6814d0e379f048ff40b1c868927a2fe3912f677359d58bdebf6732c37c408aad not found: ID does not exist" Oct 03 09:15:47 crc kubenswrapper[4899]: I1003 09:15:47.480031 4899 scope.go:117] "RemoveContainer" containerID="7dadf8e2f7741cf56ae7d66ad25eac613423e544745a15e9651840a8bc7f9c21" Oct 03 09:15:47 crc kubenswrapper[4899]: E1003 09:15:47.480591 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7dadf8e2f7741cf56ae7d66ad25eac613423e544745a15e9651840a8bc7f9c21\": container with ID starting with 7dadf8e2f7741cf56ae7d66ad25eac613423e544745a15e9651840a8bc7f9c21 not found: ID does not exist" containerID="7dadf8e2f7741cf56ae7d66ad25eac613423e544745a15e9651840a8bc7f9c21" Oct 03 09:15:47 crc kubenswrapper[4899]: I1003 09:15:47.480620 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7dadf8e2f7741cf56ae7d66ad25eac613423e544745a15e9651840a8bc7f9c21"} err="failed to get container status \"7dadf8e2f7741cf56ae7d66ad25eac613423e544745a15e9651840a8bc7f9c21\": rpc error: code = NotFound desc = could not find container \"7dadf8e2f7741cf56ae7d66ad25eac613423e544745a15e9651840a8bc7f9c21\": container with ID starting with 7dadf8e2f7741cf56ae7d66ad25eac613423e544745a15e9651840a8bc7f9c21 not found: ID does not exist" Oct 03 09:15:47 crc kubenswrapper[4899]: I1003 09:15:47.480638 4899 scope.go:117] "RemoveContainer" containerID="e6984666b8f9bde7fc3e383ee47496eac9d1f6852b66824dbbfa92476cf9a02d" Oct 03 09:15:47 crc kubenswrapper[4899]: E1003 09:15:47.481120 4899 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"e6984666b8f9bde7fc3e383ee47496eac9d1f6852b66824dbbfa92476cf9a02d\": container with ID starting with e6984666b8f9bde7fc3e383ee47496eac9d1f6852b66824dbbfa92476cf9a02d not found: ID does not exist" containerID="e6984666b8f9bde7fc3e383ee47496eac9d1f6852b66824dbbfa92476cf9a02d" Oct 03 09:15:47 crc kubenswrapper[4899]: I1003 09:15:47.481149 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6984666b8f9bde7fc3e383ee47496eac9d1f6852b66824dbbfa92476cf9a02d"} err="failed to get container status \"e6984666b8f9bde7fc3e383ee47496eac9d1f6852b66824dbbfa92476cf9a02d\": rpc error: code = NotFound desc = could not find container \"e6984666b8f9bde7fc3e383ee47496eac9d1f6852b66824dbbfa92476cf9a02d\": container with ID starting with e6984666b8f9bde7fc3e383ee47496eac9d1f6852b66824dbbfa92476cf9a02d not found: ID does not exist" Oct 03 09:15:48 crc kubenswrapper[4899]: I1003 09:15:48.306884 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-pp9zj" Oct 03 09:15:48 crc kubenswrapper[4899]: I1003 09:15:48.307236 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pp9zj" Oct 03 09:15:48 crc kubenswrapper[4899]: I1003 09:15:48.351383 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pp9zj" Oct 03 09:15:48 crc kubenswrapper[4899]: I1003 09:15:48.538822 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="375e78f5-be3a-4196-b4d2-ed13af00e588" path="/var/lib/kubelet/pods/375e78f5-be3a-4196-b4d2-ed13af00e588/volumes" Oct 03 09:15:49 crc kubenswrapper[4899]: I1003 09:15:49.951728 4899 scope.go:117] "RemoveContainer" containerID="c71db21c2b595c777be240aa64dd0636db22025cd3ee053c30f92ff51a8efc4b" Oct 03 09:15:58 crc kubenswrapper[4899]: I1003 09:15:58.352690 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pp9zj" Oct 03 09:15:58 crc kubenswrapper[4899]: I1003 09:15:58.396063 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pp9zj"] Oct 03 09:15:58 crc kubenswrapper[4899]: I1003 09:15:58.431127 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-pp9zj" podUID="b44af5e6-263c-42bf-a316-572253523719" containerName="registry-server" containerID="cri-o://1db0a7c0f0ca0d5f39f3d0b9b6eaa8cc6a4e2fd676e0066a40915c1640673d40" gracePeriod=2 Oct 03 09:15:58 crc kubenswrapper[4899]: I1003 09:15:58.860425 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pp9zj" Oct 03 09:15:59 crc kubenswrapper[4899]: I1003 09:15:59.054672 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b44af5e6-263c-42bf-a316-572253523719-catalog-content\") pod \"b44af5e6-263c-42bf-a316-572253523719\" (UID: \"b44af5e6-263c-42bf-a316-572253523719\") " Oct 03 09:15:59 crc kubenswrapper[4899]: I1003 09:15:59.054733 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w88b9\" (UniqueName: \"kubernetes.io/projected/b44af5e6-263c-42bf-a316-572253523719-kube-api-access-w88b9\") pod \"b44af5e6-263c-42bf-a316-572253523719\" (UID: \"b44af5e6-263c-42bf-a316-572253523719\") " Oct 03 09:15:59 crc kubenswrapper[4899]: I1003 09:15:59.054850 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b44af5e6-263c-42bf-a316-572253523719-utilities\") pod \"b44af5e6-263c-42bf-a316-572253523719\" (UID: \"b44af5e6-263c-42bf-a316-572253523719\") " Oct 03 09:15:59 crc kubenswrapper[4899]: I1003 09:15:59.055981 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b44af5e6-263c-42bf-a316-572253523719-utilities" (OuterVolumeSpecName: "utilities") pod "b44af5e6-263c-42bf-a316-572253523719" (UID: "b44af5e6-263c-42bf-a316-572253523719"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:15:59 crc kubenswrapper[4899]: I1003 09:15:59.060574 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b44af5e6-263c-42bf-a316-572253523719-kube-api-access-w88b9" (OuterVolumeSpecName: "kube-api-access-w88b9") pod "b44af5e6-263c-42bf-a316-572253523719" (UID: "b44af5e6-263c-42bf-a316-572253523719"). InnerVolumeSpecName "kube-api-access-w88b9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:15:59 crc kubenswrapper[4899]: I1003 09:15:59.139780 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b44af5e6-263c-42bf-a316-572253523719-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b44af5e6-263c-42bf-a316-572253523719" (UID: "b44af5e6-263c-42bf-a316-572253523719"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:15:59 crc kubenswrapper[4899]: I1003 09:15:59.157871 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w88b9\" (UniqueName: \"kubernetes.io/projected/b44af5e6-263c-42bf-a316-572253523719-kube-api-access-w88b9\") on node \"crc\" DevicePath \"\"" Oct 03 09:15:59 crc kubenswrapper[4899]: I1003 09:15:59.157954 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b44af5e6-263c-42bf-a316-572253523719-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 09:15:59 crc kubenswrapper[4899]: I1003 09:15:59.157965 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b44af5e6-263c-42bf-a316-572253523719-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 09:15:59 crc kubenswrapper[4899]: I1003 09:15:59.442015 4899 generic.go:334] "Generic (PLEG): container finished" podID="b44af5e6-263c-42bf-a316-572253523719" containerID="1db0a7c0f0ca0d5f39f3d0b9b6eaa8cc6a4e2fd676e0066a40915c1640673d40" exitCode=0 Oct 03 09:15:59 crc kubenswrapper[4899]: I1003 09:15:59.442063 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pp9zj" event={"ID":"b44af5e6-263c-42bf-a316-572253523719","Type":"ContainerDied","Data":"1db0a7c0f0ca0d5f39f3d0b9b6eaa8cc6a4e2fd676e0066a40915c1640673d40"} Oct 03 09:15:59 crc kubenswrapper[4899]: I1003 09:15:59.442093 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pp9zj" event={"ID":"b44af5e6-263c-42bf-a316-572253523719","Type":"ContainerDied","Data":"e5b363f3ac644603b0707d76cf98503196c77c3086385f12fa14854c60d4a92e"} Oct 03 09:15:59 crc kubenswrapper[4899]: I1003 09:15:59.442103 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pp9zj" Oct 03 09:15:59 crc kubenswrapper[4899]: I1003 09:15:59.442115 4899 scope.go:117] "RemoveContainer" containerID="1db0a7c0f0ca0d5f39f3d0b9b6eaa8cc6a4e2fd676e0066a40915c1640673d40" Oct 03 09:15:59 crc kubenswrapper[4899]: I1003 09:15:59.464671 4899 scope.go:117] "RemoveContainer" containerID="484cbc85e4f1f7f995e771c9e0e4cb6c84db4a6e78d621d28a362e7671672878" Oct 03 09:15:59 crc kubenswrapper[4899]: I1003 09:15:59.477310 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pp9zj"] Oct 03 09:15:59 crc kubenswrapper[4899]: I1003 09:15:59.484660 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-pp9zj"] Oct 03 09:15:59 crc kubenswrapper[4899]: I1003 09:15:59.508250 4899 scope.go:117] "RemoveContainer" containerID="89e9dc6dc866c4e4af178cfae42de437a981a8bb816e497213d60b68e7a640b3" Oct 03 09:15:59 crc kubenswrapper[4899]: I1003 09:15:59.534300 4899 scope.go:117] "RemoveContainer" containerID="1db0a7c0f0ca0d5f39f3d0b9b6eaa8cc6a4e2fd676e0066a40915c1640673d40" Oct 03 09:15:59 crc kubenswrapper[4899]: E1003 09:15:59.536323 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1db0a7c0f0ca0d5f39f3d0b9b6eaa8cc6a4e2fd676e0066a40915c1640673d40\": container with ID starting with 1db0a7c0f0ca0d5f39f3d0b9b6eaa8cc6a4e2fd676e0066a40915c1640673d40 not found: ID does not exist" containerID="1db0a7c0f0ca0d5f39f3d0b9b6eaa8cc6a4e2fd676e0066a40915c1640673d40" Oct 03 09:15:59 crc kubenswrapper[4899]: I1003 09:15:59.536365 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1db0a7c0f0ca0d5f39f3d0b9b6eaa8cc6a4e2fd676e0066a40915c1640673d40"} err="failed to get container status \"1db0a7c0f0ca0d5f39f3d0b9b6eaa8cc6a4e2fd676e0066a40915c1640673d40\": rpc error: code = NotFound desc = could not find container \"1db0a7c0f0ca0d5f39f3d0b9b6eaa8cc6a4e2fd676e0066a40915c1640673d40\": container with ID starting with 1db0a7c0f0ca0d5f39f3d0b9b6eaa8cc6a4e2fd676e0066a40915c1640673d40 not found: ID does not exist" Oct 03 09:15:59 crc kubenswrapper[4899]: I1003 09:15:59.536393 4899 scope.go:117] "RemoveContainer" containerID="484cbc85e4f1f7f995e771c9e0e4cb6c84db4a6e78d621d28a362e7671672878" Oct 03 09:15:59 crc kubenswrapper[4899]: E1003 09:15:59.536770 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"484cbc85e4f1f7f995e771c9e0e4cb6c84db4a6e78d621d28a362e7671672878\": container with ID starting with 484cbc85e4f1f7f995e771c9e0e4cb6c84db4a6e78d621d28a362e7671672878 not found: ID does not exist" containerID="484cbc85e4f1f7f995e771c9e0e4cb6c84db4a6e78d621d28a362e7671672878" Oct 03 09:15:59 crc kubenswrapper[4899]: I1003 09:15:59.536815 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"484cbc85e4f1f7f995e771c9e0e4cb6c84db4a6e78d621d28a362e7671672878"} err="failed to get container status \"484cbc85e4f1f7f995e771c9e0e4cb6c84db4a6e78d621d28a362e7671672878\": rpc error: code = NotFound desc = could not find container \"484cbc85e4f1f7f995e771c9e0e4cb6c84db4a6e78d621d28a362e7671672878\": container with ID starting with 484cbc85e4f1f7f995e771c9e0e4cb6c84db4a6e78d621d28a362e7671672878 not found: ID does not exist" Oct 03 09:15:59 crc kubenswrapper[4899]: I1003 09:15:59.536844 4899 scope.go:117] "RemoveContainer" 
containerID="89e9dc6dc866c4e4af178cfae42de437a981a8bb816e497213d60b68e7a640b3" Oct 03 09:15:59 crc kubenswrapper[4899]: E1003 09:15:59.537097 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89e9dc6dc866c4e4af178cfae42de437a981a8bb816e497213d60b68e7a640b3\": container with ID starting with 89e9dc6dc866c4e4af178cfae42de437a981a8bb816e497213d60b68e7a640b3 not found: ID does not exist" containerID="89e9dc6dc866c4e4af178cfae42de437a981a8bb816e497213d60b68e7a640b3" Oct 03 09:15:59 crc kubenswrapper[4899]: I1003 09:15:59.537126 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89e9dc6dc866c4e4af178cfae42de437a981a8bb816e497213d60b68e7a640b3"} err="failed to get container status \"89e9dc6dc866c4e4af178cfae42de437a981a8bb816e497213d60b68e7a640b3\": rpc error: code = NotFound desc = could not find container \"89e9dc6dc866c4e4af178cfae42de437a981a8bb816e497213d60b68e7a640b3\": container with ID starting with 89e9dc6dc866c4e4af178cfae42de437a981a8bb816e497213d60b68e7a640b3 not found: ID does not exist" Oct 03 09:16:00 crc kubenswrapper[4899]: I1003 09:16:00.536585 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b44af5e6-263c-42bf-a316-572253523719" path="/var/lib/kubelet/pods/b44af5e6-263c-42bf-a316-572253523719/volumes" Oct 03 09:16:12 crc kubenswrapper[4899]: I1003 09:16:12.198747 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 09:16:12 crc kubenswrapper[4899]: I1003 09:16:12.199246 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 09:16:42 crc kubenswrapper[4899]: I1003 09:16:42.198294 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 09:16:42 crc kubenswrapper[4899]: I1003 09:16:42.199211 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 09:17:12 crc kubenswrapper[4899]: I1003 09:17:12.198591 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 09:17:12 crc kubenswrapper[4899]: I1003 09:17:12.199137 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 09:17:12 crc kubenswrapper[4899]: I1003 09:17:12.199190 4899 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 09:17:12 crc kubenswrapper[4899]: I1003 09:17:12.200038 4899 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27"} pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 09:17:12 crc kubenswrapper[4899]: I1003 09:17:12.200107 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" containerID="cri-o://ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27" gracePeriod=600 Oct 03 09:17:12 crc kubenswrapper[4899]: E1003 09:17:12.318667 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:17:13 crc kubenswrapper[4899]: I1003 09:17:13.078693 4899 generic.go:334] "Generic (PLEG): container finished" podID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerID="ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27" exitCode=0 Oct 03 09:17:13 crc kubenswrapper[4899]: I1003 09:17:13.078785 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerDied","Data":"ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27"} Oct 03 09:17:13 crc kubenswrapper[4899]: I1003 09:17:13.079072 4899 scope.go:117] "RemoveContainer" containerID="b2b34b8d69d69b7d2982e1866f37318d87ef851d9135ccb09c02fa5d8cd572f5" Oct 03 09:17:13 crc kubenswrapper[4899]: I1003 09:17:13.080031 4899 scope.go:117] "RemoveContainer" containerID="ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27" Oct 03 09:17:13 crc kubenswrapper[4899]: E1003 09:17:13.082287 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:17:25 crc kubenswrapper[4899]: I1003 09:17:25.527222 4899 scope.go:117] "RemoveContainer" containerID="ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27" Oct 03 09:17:25 crc kubenswrapper[4899]: E1003 09:17:25.527995 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:17:27 crc kubenswrapper[4899]: I1003 09:17:27.186248 4899 generic.go:334] "Generic (PLEG): container finished" podID="d9001fcb-add1-41c8-9638-097229339246" containerID="f1c74955aad972357d9ed2abb328681c5806afd86ec953f4b209f8121bd8ec58" exitCode=0 Oct 03 09:17:27 crc kubenswrapper[4899]: I1003 09:17:27.186325 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cb474" event={"ID":"d9001fcb-add1-41c8-9638-097229339246","Type":"ContainerDied","Data":"f1c74955aad972357d9ed2abb328681c5806afd86ec953f4b209f8121bd8ec58"} Oct 03 09:17:28 crc kubenswrapper[4899]: I1003 09:17:28.579758 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cb474" Oct 03 09:17:28 crc kubenswrapper[4899]: I1003 09:17:28.591926 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9001fcb-add1-41c8-9638-097229339246-libvirt-combined-ca-bundle\") pod \"d9001fcb-add1-41c8-9638-097229339246\" (UID: \"d9001fcb-add1-41c8-9638-097229339246\") " Oct 03 09:17:28 crc kubenswrapper[4899]: I1003 09:17:28.592056 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d9001fcb-add1-41c8-9638-097229339246-ssh-key\") pod \"d9001fcb-add1-41c8-9638-097229339246\" (UID: \"d9001fcb-add1-41c8-9638-097229339246\") " Oct 03 09:17:28 crc kubenswrapper[4899]: I1003 09:17:28.592098 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d9001fcb-add1-41c8-9638-097229339246-libvirt-secret-0\") pod \"d9001fcb-add1-41c8-9638-097229339246\" (UID: \"d9001fcb-add1-41c8-9638-097229339246\") " Oct 03 09:17:28 crc kubenswrapper[4899]: I1003 09:17:28.592190 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d9001fcb-add1-41c8-9638-097229339246-inventory\") pod \"d9001fcb-add1-41c8-9638-097229339246\" (UID: \"d9001fcb-add1-41c8-9638-097229339246\") " Oct 03 09:17:28 crc kubenswrapper[4899]: I1003 09:17:28.592223 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5gs7l\" (UniqueName: \"kubernetes.io/projected/d9001fcb-add1-41c8-9638-097229339246-kube-api-access-5gs7l\") pod \"d9001fcb-add1-41c8-9638-097229339246\" (UID: \"d9001fcb-add1-41c8-9638-097229339246\") " Oct 03 09:17:28 crc kubenswrapper[4899]: I1003 09:17:28.604354 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9001fcb-add1-41c8-9638-097229339246-kube-api-access-5gs7l" (OuterVolumeSpecName: "kube-api-access-5gs7l") pod "d9001fcb-add1-41c8-9638-097229339246" (UID: "d9001fcb-add1-41c8-9638-097229339246"). InnerVolumeSpecName "kube-api-access-5gs7l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:17:28 crc kubenswrapper[4899]: I1003 09:17:28.606861 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9001fcb-add1-41c8-9638-097229339246-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "d9001fcb-add1-41c8-9638-097229339246" (UID: "d9001fcb-add1-41c8-9638-097229339246"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:17:28 crc kubenswrapper[4899]: I1003 09:17:28.623126 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9001fcb-add1-41c8-9638-097229339246-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d9001fcb-add1-41c8-9638-097229339246" (UID: "d9001fcb-add1-41c8-9638-097229339246"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:17:28 crc kubenswrapper[4899]: I1003 09:17:28.625761 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9001fcb-add1-41c8-9638-097229339246-inventory" (OuterVolumeSpecName: "inventory") pod "d9001fcb-add1-41c8-9638-097229339246" (UID: "d9001fcb-add1-41c8-9638-097229339246"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:17:28 crc kubenswrapper[4899]: I1003 09:17:28.632236 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9001fcb-add1-41c8-9638-097229339246-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "d9001fcb-add1-41c8-9638-097229339246" (UID: "d9001fcb-add1-41c8-9638-097229339246"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:17:28 crc kubenswrapper[4899]: I1003 09:17:28.696322 4899 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d9001fcb-add1-41c8-9638-097229339246-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 09:17:28 crc kubenswrapper[4899]: I1003 09:17:28.696578 4899 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d9001fcb-add1-41c8-9638-097229339246-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Oct 03 09:17:28 crc kubenswrapper[4899]: I1003 09:17:28.696648 4899 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d9001fcb-add1-41c8-9638-097229339246-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 09:17:28 crc kubenswrapper[4899]: I1003 09:17:28.696704 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5gs7l\" (UniqueName: \"kubernetes.io/projected/d9001fcb-add1-41c8-9638-097229339246-kube-api-access-5gs7l\") on node \"crc\" DevicePath \"\"" Oct 03 09:17:28 crc kubenswrapper[4899]: I1003 09:17:28.696764 4899 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9001fcb-add1-41c8-9638-097229339246-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.204108 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cb474" event={"ID":"d9001fcb-add1-41c8-9638-097229339246","Type":"ContainerDied","Data":"1a00fdbfc841d7957e7888505e85c9b9e93977f5c493b59c34e7e4780cbf0e98"} Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.204149 4899 
pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1a00fdbfc841d7957e7888505e85c9b9e93977f5c493b59c34e7e4780cbf0e98" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.204178 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cb474" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.286957 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7"] Oct 03 09:17:29 crc kubenswrapper[4899]: E1003 09:17:29.287327 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9001fcb-add1-41c8-9638-097229339246" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.287341 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9001fcb-add1-41c8-9638-097229339246" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 03 09:17:29 crc kubenswrapper[4899]: E1003 09:17:29.287358 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="375e78f5-be3a-4196-b4d2-ed13af00e588" containerName="registry-server" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.287365 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="375e78f5-be3a-4196-b4d2-ed13af00e588" containerName="registry-server" Oct 03 09:17:29 crc kubenswrapper[4899]: E1003 09:17:29.287390 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b44af5e6-263c-42bf-a316-572253523719" containerName="extract-utilities" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.287397 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="b44af5e6-263c-42bf-a316-572253523719" containerName="extract-utilities" Oct 03 09:17:29 crc kubenswrapper[4899]: E1003 09:17:29.287410 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="375e78f5-be3a-4196-b4d2-ed13af00e588" containerName="extract-utilities" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.287416 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="375e78f5-be3a-4196-b4d2-ed13af00e588" containerName="extract-utilities" Oct 03 09:17:29 crc kubenswrapper[4899]: E1003 09:17:29.287425 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="375e78f5-be3a-4196-b4d2-ed13af00e588" containerName="extract-content" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.287432 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="375e78f5-be3a-4196-b4d2-ed13af00e588" containerName="extract-content" Oct 03 09:17:29 crc kubenswrapper[4899]: E1003 09:17:29.287442 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b44af5e6-263c-42bf-a316-572253523719" containerName="extract-content" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.287447 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="b44af5e6-263c-42bf-a316-572253523719" containerName="extract-content" Oct 03 09:17:29 crc kubenswrapper[4899]: E1003 09:17:29.287463 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b44af5e6-263c-42bf-a316-572253523719" containerName="registry-server" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.287472 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="b44af5e6-263c-42bf-a316-572253523719" containerName="registry-server" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.287648 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="b44af5e6-263c-42bf-a316-572253523719" 
containerName="registry-server" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.287664 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9001fcb-add1-41c8-9638-097229339246" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.287673 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="375e78f5-be3a-4196-b4d2-ed13af00e588" containerName="registry-server" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.289249 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.291497 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.291940 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.292215 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.292478 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.292875 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.293197 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pnmjv" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.294876 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.298562 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7"] Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.306954 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmjd7\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.307029 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmjd7\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.307056 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmjd7\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.307084 4899 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmjd7\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.307121 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmjd7\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.307167 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmjd7\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.307208 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmjd7\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.307232 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmjd7\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.307249 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7cdv\" (UniqueName: \"kubernetes.io/projected/0bc97030-8da8-4cd2-8645-9962d50b08d3-kube-api-access-s7cdv\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmjd7\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.408539 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmjd7\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.408622 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmjd7\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:29 crc 
kubenswrapper[4899]: I1003 09:17:29.408661 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmjd7\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.408686 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmjd7\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.408705 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7cdv\" (UniqueName: \"kubernetes.io/projected/0bc97030-8da8-4cd2-8645-9962d50b08d3-kube-api-access-s7cdv\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmjd7\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.408744 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmjd7\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.408785 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmjd7\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.408811 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmjd7\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.408846 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmjd7\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.409698 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmjd7\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.413348 4899 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmjd7\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.413474 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmjd7\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.413652 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmjd7\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.413908 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmjd7\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.414333 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmjd7\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.415129 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmjd7\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.415421 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmjd7\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.426725 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7cdv\" (UniqueName: \"kubernetes.io/projected/0bc97030-8da8-4cd2-8645-9962d50b08d3-kube-api-access-s7cdv\") pod \"nova-edpm-deployment-openstack-edpm-ipam-kmjd7\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:29 crc kubenswrapper[4899]: I1003 09:17:29.612682 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:17:30 crc kubenswrapper[4899]: I1003 09:17:30.106693 4899 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 09:17:30 crc kubenswrapper[4899]: I1003 09:17:30.116642 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7"] Oct 03 09:17:30 crc kubenswrapper[4899]: I1003 09:17:30.212080 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" event={"ID":"0bc97030-8da8-4cd2-8645-9962d50b08d3","Type":"ContainerStarted","Data":"4e535664c378297d603d740fd9ed0a271dfd51572172732f0e6b78c6438f7cb5"} Oct 03 09:17:31 crc kubenswrapper[4899]: I1003 09:17:31.229280 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" event={"ID":"0bc97030-8da8-4cd2-8645-9962d50b08d3","Type":"ContainerStarted","Data":"d3b7c7052bfc2cc1704f5d602bca782fe363c81dcf1fa67c96bf4fcfea2a73fb"} Oct 03 09:17:37 crc kubenswrapper[4899]: I1003 09:17:37.526913 4899 scope.go:117] "RemoveContainer" containerID="ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27" Oct 03 09:17:37 crc kubenswrapper[4899]: E1003 09:17:37.527805 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:17:49 crc kubenswrapper[4899]: I1003 09:17:49.528427 4899 scope.go:117] "RemoveContainer" containerID="ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27" Oct 03 09:17:49 crc kubenswrapper[4899]: E1003 09:17:49.529243 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:18:02 crc kubenswrapper[4899]: I1003 09:18:02.527474 4899 scope.go:117] "RemoveContainer" containerID="ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27" Oct 03 09:18:02 crc kubenswrapper[4899]: E1003 09:18:02.528232 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:18:17 crc kubenswrapper[4899]: I1003 09:18:17.527414 4899 scope.go:117] "RemoveContainer" containerID="ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27" Oct 03 09:18:17 crc kubenswrapper[4899]: E1003 09:18:17.528347 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:18:30 crc kubenswrapper[4899]: I1003 09:18:30.526946 4899 scope.go:117] "RemoveContainer" containerID="ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27" Oct 03 09:18:30 crc kubenswrapper[4899]: E1003 09:18:30.527730 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:18:41 crc kubenswrapper[4899]: I1003 09:18:41.527092 4899 scope.go:117] "RemoveContainer" containerID="ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27" Oct 03 09:18:41 crc kubenswrapper[4899]: E1003 09:18:41.527787 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:18:53 crc kubenswrapper[4899]: I1003 09:18:53.527114 4899 scope.go:117] "RemoveContainer" containerID="ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27" Oct 03 09:18:53 crc kubenswrapper[4899]: E1003 09:18:53.527859 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:19:07 crc kubenswrapper[4899]: I1003 09:19:07.526965 4899 scope.go:117] "RemoveContainer" containerID="ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27" Oct 03 09:19:07 crc kubenswrapper[4899]: E1003 09:19:07.527713 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:19:20 crc kubenswrapper[4899]: I1003 09:19:20.526868 4899 scope.go:117] "RemoveContainer" containerID="ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27" Oct 03 09:19:20 crc kubenswrapper[4899]: E1003 09:19:20.527675 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:19:31 crc kubenswrapper[4899]: I1003 09:19:31.526756 4899 scope.go:117] "RemoveContainer" containerID="ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27" Oct 03 09:19:31 crc kubenswrapper[4899]: E1003 09:19:31.527451 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:19:46 crc kubenswrapper[4899]: I1003 09:19:46.533914 4899 scope.go:117] "RemoveContainer" containerID="ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27" Oct 03 09:19:46 crc kubenswrapper[4899]: E1003 09:19:46.535369 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:19:58 crc kubenswrapper[4899]: I1003 09:19:58.527415 4899 scope.go:117] "RemoveContainer" containerID="ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27" Oct 03 09:19:58 crc kubenswrapper[4899]: E1003 09:19:58.528241 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:20:11 crc kubenswrapper[4899]: I1003 09:20:11.528330 4899 scope.go:117] "RemoveContainer" containerID="ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27" Oct 03 09:20:11 crc kubenswrapper[4899]: E1003 09:20:11.529348 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:20:23 crc kubenswrapper[4899]: I1003 09:20:23.528077 4899 scope.go:117] "RemoveContainer" containerID="ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27" Oct 03 09:20:23 crc kubenswrapper[4899]: E1003 09:20:23.528741 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:20:24 crc kubenswrapper[4899]: I1003 09:20:24.689559 4899 
generic.go:334] "Generic (PLEG): container finished" podID="0bc97030-8da8-4cd2-8645-9962d50b08d3" containerID="d3b7c7052bfc2cc1704f5d602bca782fe363c81dcf1fa67c96bf4fcfea2a73fb" exitCode=0 Oct 03 09:20:24 crc kubenswrapper[4899]: I1003 09:20:24.689612 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" event={"ID":"0bc97030-8da8-4cd2-8645-9962d50b08d3","Type":"ContainerDied","Data":"d3b7c7052bfc2cc1704f5d602bca782fe363c81dcf1fa67c96bf4fcfea2a73fb"} Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.085564 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.258356 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-migration-ssh-key-1\") pod \"0bc97030-8da8-4cd2-8645-9962d50b08d3\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.258909 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-combined-ca-bundle\") pod \"0bc97030-8da8-4cd2-8645-9962d50b08d3\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.258951 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-extra-config-0\") pod \"0bc97030-8da8-4cd2-8645-9962d50b08d3\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.259059 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-inventory\") pod \"0bc97030-8da8-4cd2-8645-9962d50b08d3\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.259086 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-migration-ssh-key-0\") pod \"0bc97030-8da8-4cd2-8645-9962d50b08d3\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.259128 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-cell1-compute-config-1\") pod \"0bc97030-8da8-4cd2-8645-9962d50b08d3\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.259235 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s7cdv\" (UniqueName: \"kubernetes.io/projected/0bc97030-8da8-4cd2-8645-9962d50b08d3-kube-api-access-s7cdv\") pod \"0bc97030-8da8-4cd2-8645-9962d50b08d3\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.259283 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-ssh-key\") pod 
\"0bc97030-8da8-4cd2-8645-9962d50b08d3\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.259310 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-cell1-compute-config-0\") pod \"0bc97030-8da8-4cd2-8645-9962d50b08d3\" (UID: \"0bc97030-8da8-4cd2-8645-9962d50b08d3\") " Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.264304 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bc97030-8da8-4cd2-8645-9962d50b08d3-kube-api-access-s7cdv" (OuterVolumeSpecName: "kube-api-access-s7cdv") pod "0bc97030-8da8-4cd2-8645-9962d50b08d3" (UID: "0bc97030-8da8-4cd2-8645-9962d50b08d3"). InnerVolumeSpecName "kube-api-access-s7cdv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.265298 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "0bc97030-8da8-4cd2-8645-9962d50b08d3" (UID: "0bc97030-8da8-4cd2-8645-9962d50b08d3"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.286315 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "0bc97030-8da8-4cd2-8645-9962d50b08d3" (UID: "0bc97030-8da8-4cd2-8645-9962d50b08d3"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.288461 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "0bc97030-8da8-4cd2-8645-9962d50b08d3" (UID: "0bc97030-8da8-4cd2-8645-9962d50b08d3"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.289017 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-inventory" (OuterVolumeSpecName: "inventory") pod "0bc97030-8da8-4cd2-8645-9962d50b08d3" (UID: "0bc97030-8da8-4cd2-8645-9962d50b08d3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.293020 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0bc97030-8da8-4cd2-8645-9962d50b08d3" (UID: "0bc97030-8da8-4cd2-8645-9962d50b08d3"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.293133 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "0bc97030-8da8-4cd2-8645-9962d50b08d3" (UID: "0bc97030-8da8-4cd2-8645-9962d50b08d3"). 
InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.295239 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "0bc97030-8da8-4cd2-8645-9962d50b08d3" (UID: "0bc97030-8da8-4cd2-8645-9962d50b08d3"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.295686 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "0bc97030-8da8-4cd2-8645-9962d50b08d3" (UID: "0bc97030-8da8-4cd2-8645-9962d50b08d3"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.361475 4899 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.361507 4899 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.361519 4899 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.361528 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s7cdv\" (UniqueName: \"kubernetes.io/projected/0bc97030-8da8-4cd2-8645-9962d50b08d3-kube-api-access-s7cdv\") on node \"crc\" DevicePath \"\"" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.361536 4899 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.361544 4899 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.361551 4899 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.361562 4899 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.361571 4899 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/0bc97030-8da8-4cd2-8645-9962d50b08d3-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Oct 03 09:20:26 crc 
kubenswrapper[4899]: I1003 09:20:26.711224 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" event={"ID":"0bc97030-8da8-4cd2-8645-9962d50b08d3","Type":"ContainerDied","Data":"4e535664c378297d603d740fd9ed0a271dfd51572172732f0e6b78c6438f7cb5"} Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.711265 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4e535664c378297d603d740fd9ed0a271dfd51572172732f0e6b78c6438f7cb5" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.711320 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-kmjd7" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.803401 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265"] Oct 03 09:20:26 crc kubenswrapper[4899]: E1003 09:20:26.803811 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bc97030-8da8-4cd2-8645-9962d50b08d3" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.803831 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bc97030-8da8-4cd2-8645-9962d50b08d3" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.804032 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="0bc97030-8da8-4cd2-8645-9962d50b08d3" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.804660 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.806327 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-pnmjv" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.806949 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.807055 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.809968 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.811715 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.837143 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265"] Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.973267 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kw265\" (UID: \"78460eaf-b283-4155-be7c-57230376bbcc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.973349 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" 
(UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kw265\" (UID: \"78460eaf-b283-4155-be7c-57230376bbcc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.973514 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kw265\" (UID: \"78460eaf-b283-4155-be7c-57230376bbcc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.973559 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kw265\" (UID: \"78460eaf-b283-4155-be7c-57230376bbcc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.973731 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgr8k\" (UniqueName: \"kubernetes.io/projected/78460eaf-b283-4155-be7c-57230376bbcc-kube-api-access-mgr8k\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kw265\" (UID: \"78460eaf-b283-4155-be7c-57230376bbcc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.973766 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kw265\" (UID: \"78460eaf-b283-4155-be7c-57230376bbcc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" Oct 03 09:20:26 crc kubenswrapper[4899]: I1003 09:20:26.974090 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kw265\" (UID: \"78460eaf-b283-4155-be7c-57230376bbcc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" Oct 03 09:20:27 crc kubenswrapper[4899]: I1003 09:20:27.075782 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kw265\" (UID: \"78460eaf-b283-4155-be7c-57230376bbcc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" Oct 03 09:20:27 crc kubenswrapper[4899]: I1003 09:20:27.075846 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kw265\" (UID: \"78460eaf-b283-4155-be7c-57230376bbcc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" Oct 03 09:20:27 crc kubenswrapper[4899]: 
I1003 09:20:27.075874 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kw265\" (UID: \"78460eaf-b283-4155-be7c-57230376bbcc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" Oct 03 09:20:27 crc kubenswrapper[4899]: I1003 09:20:27.075915 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kw265\" (UID: \"78460eaf-b283-4155-be7c-57230376bbcc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" Oct 03 09:20:27 crc kubenswrapper[4899]: I1003 09:20:27.075964 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgr8k\" (UniqueName: \"kubernetes.io/projected/78460eaf-b283-4155-be7c-57230376bbcc-kube-api-access-mgr8k\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kw265\" (UID: \"78460eaf-b283-4155-be7c-57230376bbcc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" Oct 03 09:20:27 crc kubenswrapper[4899]: I1003 09:20:27.075985 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kw265\" (UID: \"78460eaf-b283-4155-be7c-57230376bbcc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" Oct 03 09:20:27 crc kubenswrapper[4899]: I1003 09:20:27.076801 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kw265\" (UID: \"78460eaf-b283-4155-be7c-57230376bbcc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" Oct 03 09:20:27 crc kubenswrapper[4899]: I1003 09:20:27.080737 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kw265\" (UID: \"78460eaf-b283-4155-be7c-57230376bbcc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" Oct 03 09:20:27 crc kubenswrapper[4899]: I1003 09:20:27.081441 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kw265\" (UID: \"78460eaf-b283-4155-be7c-57230376bbcc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" Oct 03 09:20:27 crc kubenswrapper[4899]: I1003 09:20:27.091404 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kw265\" (UID: \"78460eaf-b283-4155-be7c-57230376bbcc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" Oct 03 09:20:27 crc kubenswrapper[4899]: I1003 
09:20:27.091500 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kw265\" (UID: \"78460eaf-b283-4155-be7c-57230376bbcc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" Oct 03 09:20:27 crc kubenswrapper[4899]: I1003 09:20:27.091507 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgr8k\" (UniqueName: \"kubernetes.io/projected/78460eaf-b283-4155-be7c-57230376bbcc-kube-api-access-mgr8k\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kw265\" (UID: \"78460eaf-b283-4155-be7c-57230376bbcc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" Oct 03 09:20:27 crc kubenswrapper[4899]: I1003 09:20:27.092570 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kw265\" (UID: \"78460eaf-b283-4155-be7c-57230376bbcc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" Oct 03 09:20:27 crc kubenswrapper[4899]: I1003 09:20:27.093018 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kw265\" (UID: \"78460eaf-b283-4155-be7c-57230376bbcc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" Oct 03 09:20:27 crc kubenswrapper[4899]: I1003 09:20:27.125649 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" Oct 03 09:20:27 crc kubenswrapper[4899]: I1003 09:20:27.706832 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265"] Oct 03 09:20:28 crc kubenswrapper[4899]: E1003 09:20:28.664713 4899 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0bc97030_8da8_4cd2_8645_9962d50b08d3.slice\": RecentStats: unable to find data in memory cache]" Oct 03 09:20:28 crc kubenswrapper[4899]: I1003 09:20:28.730269 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" event={"ID":"78460eaf-b283-4155-be7c-57230376bbcc","Type":"ContainerStarted","Data":"2a8907d908ee219cb5f16d4ae049ec83e5d3083a660c502145fccfa8a62aab3c"} Oct 03 09:20:28 crc kubenswrapper[4899]: I1003 09:20:28.730312 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" event={"ID":"78460eaf-b283-4155-be7c-57230376bbcc","Type":"ContainerStarted","Data":"a41af09a25ed5bb26ae00dcff3ce1f86bae4d28bcd2231187b27206a7ec39976"} Oct 03 09:20:28 crc kubenswrapper[4899]: I1003 09:20:28.753415 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" podStartSLOduration=2.335906552 podStartE2EDuration="2.753397013s" podCreationTimestamp="2025-10-03 09:20:26 +0000 UTC" firstStartedPulling="2025-10-03 09:20:27.721323044 +0000 UTC m=+2401.828807997" lastFinishedPulling="2025-10-03 09:20:28.138813505 +0000 UTC m=+2402.246298458" observedRunningTime="2025-10-03 09:20:28.750061707 +0000 UTC m=+2402.857546660" watchObservedRunningTime="2025-10-03 09:20:28.753397013 +0000 UTC m=+2402.860881966" Oct 03 09:20:35 crc kubenswrapper[4899]: I1003 09:20:35.527429 4899 scope.go:117] "RemoveContainer" containerID="ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27" Oct 03 09:20:35 crc kubenswrapper[4899]: E1003 09:20:35.528340 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:20:38 crc kubenswrapper[4899]: E1003 09:20:38.904752 4899 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0bc97030_8da8_4cd2_8645_9962d50b08d3.slice\": RecentStats: unable to find data in memory cache]" Oct 03 09:20:46 crc kubenswrapper[4899]: I1003 09:20:46.533531 4899 scope.go:117] "RemoveContainer" containerID="ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27" Oct 03 09:20:46 crc kubenswrapper[4899]: E1003 09:20:46.535547 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:20:49 crc kubenswrapper[4899]: E1003 09:20:49.136267 4899 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0bc97030_8da8_4cd2_8645_9962d50b08d3.slice\": RecentStats: unable to find data in memory cache]" Oct 03 09:20:57 crc kubenswrapper[4899]: I1003 09:20:57.526748 4899 scope.go:117] "RemoveContainer" containerID="ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27" Oct 03 09:20:57 crc kubenswrapper[4899]: E1003 09:20:57.527789 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:20:59 crc kubenswrapper[4899]: E1003 09:20:59.373548 4899 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0bc97030_8da8_4cd2_8645_9962d50b08d3.slice\": RecentStats: unable to find data in memory cache]" Oct 03 09:21:09 crc kubenswrapper[4899]: E1003 09:21:09.602600 4899 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0bc97030_8da8_4cd2_8645_9962d50b08d3.slice\": RecentStats: unable to find data in memory cache]" Oct 03 09:21:11 crc kubenswrapper[4899]: I1003 09:21:11.526921 4899 scope.go:117] "RemoveContainer" containerID="ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27" Oct 03 09:21:11 crc kubenswrapper[4899]: E1003 09:21:11.528564 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:21:19 crc kubenswrapper[4899]: E1003 09:21:19.851650 4899 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0bc97030_8da8_4cd2_8645_9962d50b08d3.slice\": RecentStats: unable to find data in memory cache]" Oct 03 09:21:24 crc kubenswrapper[4899]: I1003 09:21:24.527276 4899 scope.go:117] "RemoveContainer" containerID="ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27" Oct 03 09:21:24 crc kubenswrapper[4899]: E1003 09:21:24.528150 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:21:36 crc kubenswrapper[4899]: I1003 09:21:36.534240 4899 
scope.go:117] "RemoveContainer" containerID="ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27" Oct 03 09:21:36 crc kubenswrapper[4899]: E1003 09:21:36.535056 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:21:49 crc kubenswrapper[4899]: I1003 09:21:49.528514 4899 scope.go:117] "RemoveContainer" containerID="ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27" Oct 03 09:21:49 crc kubenswrapper[4899]: E1003 09:21:49.529685 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:22:03 crc kubenswrapper[4899]: I1003 09:22:03.527119 4899 scope.go:117] "RemoveContainer" containerID="ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27" Oct 03 09:22:03 crc kubenswrapper[4899]: E1003 09:22:03.527790 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:22:18 crc kubenswrapper[4899]: I1003 09:22:18.527042 4899 scope.go:117] "RemoveContainer" containerID="ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27" Oct 03 09:22:19 crc kubenswrapper[4899]: I1003 09:22:19.671171 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerStarted","Data":"d5dd56986e2dd2f071f94a8855f6a4fbd34efa104bfcd4d459ad9f6f9563b980"} Oct 03 09:22:33 crc kubenswrapper[4899]: I1003 09:22:33.794097 4899 generic.go:334] "Generic (PLEG): container finished" podID="78460eaf-b283-4155-be7c-57230376bbcc" containerID="2a8907d908ee219cb5f16d4ae049ec83e5d3083a660c502145fccfa8a62aab3c" exitCode=0 Oct 03 09:22:33 crc kubenswrapper[4899]: I1003 09:22:33.794200 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" event={"ID":"78460eaf-b283-4155-be7c-57230376bbcc","Type":"ContainerDied","Data":"2a8907d908ee219cb5f16d4ae049ec83e5d3083a660c502145fccfa8a62aab3c"} Oct 03 09:22:35 crc kubenswrapper[4899]: I1003 09:22:35.201867 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" Oct 03 09:22:35 crc kubenswrapper[4899]: I1003 09:22:35.260112 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-telemetry-combined-ca-bundle\") pod \"78460eaf-b283-4155-be7c-57230376bbcc\" (UID: \"78460eaf-b283-4155-be7c-57230376bbcc\") " Oct 03 09:22:35 crc kubenswrapper[4899]: I1003 09:22:35.260548 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-ssh-key\") pod \"78460eaf-b283-4155-be7c-57230376bbcc\" (UID: \"78460eaf-b283-4155-be7c-57230376bbcc\") " Oct 03 09:22:35 crc kubenswrapper[4899]: I1003 09:22:35.260684 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-ceilometer-compute-config-data-0\") pod \"78460eaf-b283-4155-be7c-57230376bbcc\" (UID: \"78460eaf-b283-4155-be7c-57230376bbcc\") " Oct 03 09:22:35 crc kubenswrapper[4899]: I1003 09:22:35.260717 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mgr8k\" (UniqueName: \"kubernetes.io/projected/78460eaf-b283-4155-be7c-57230376bbcc-kube-api-access-mgr8k\") pod \"78460eaf-b283-4155-be7c-57230376bbcc\" (UID: \"78460eaf-b283-4155-be7c-57230376bbcc\") " Oct 03 09:22:35 crc kubenswrapper[4899]: I1003 09:22:35.260786 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-ceilometer-compute-config-data-2\") pod \"78460eaf-b283-4155-be7c-57230376bbcc\" (UID: \"78460eaf-b283-4155-be7c-57230376bbcc\") " Oct 03 09:22:35 crc kubenswrapper[4899]: I1003 09:22:35.260836 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-inventory\") pod \"78460eaf-b283-4155-be7c-57230376bbcc\" (UID: \"78460eaf-b283-4155-be7c-57230376bbcc\") " Oct 03 09:22:35 crc kubenswrapper[4899]: I1003 09:22:35.260964 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-ceilometer-compute-config-data-1\") pod \"78460eaf-b283-4155-be7c-57230376bbcc\" (UID: \"78460eaf-b283-4155-be7c-57230376bbcc\") " Oct 03 09:22:35 crc kubenswrapper[4899]: I1003 09:22:35.271218 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78460eaf-b283-4155-be7c-57230376bbcc-kube-api-access-mgr8k" (OuterVolumeSpecName: "kube-api-access-mgr8k") pod "78460eaf-b283-4155-be7c-57230376bbcc" (UID: "78460eaf-b283-4155-be7c-57230376bbcc"). InnerVolumeSpecName "kube-api-access-mgr8k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:22:35 crc kubenswrapper[4899]: I1003 09:22:35.272161 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "78460eaf-b283-4155-be7c-57230376bbcc" (UID: "78460eaf-b283-4155-be7c-57230376bbcc"). 
InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:22:35 crc kubenswrapper[4899]: I1003 09:22:35.291744 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "78460eaf-b283-4155-be7c-57230376bbcc" (UID: "78460eaf-b283-4155-be7c-57230376bbcc"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:22:35 crc kubenswrapper[4899]: I1003 09:22:35.293053 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "78460eaf-b283-4155-be7c-57230376bbcc" (UID: "78460eaf-b283-4155-be7c-57230376bbcc"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:22:35 crc kubenswrapper[4899]: I1003 09:22:35.294529 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "78460eaf-b283-4155-be7c-57230376bbcc" (UID: "78460eaf-b283-4155-be7c-57230376bbcc"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:22:35 crc kubenswrapper[4899]: I1003 09:22:35.300023 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "78460eaf-b283-4155-be7c-57230376bbcc" (UID: "78460eaf-b283-4155-be7c-57230376bbcc"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:22:35 crc kubenswrapper[4899]: I1003 09:22:35.301211 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-inventory" (OuterVolumeSpecName: "inventory") pod "78460eaf-b283-4155-be7c-57230376bbcc" (UID: "78460eaf-b283-4155-be7c-57230376bbcc"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:22:35 crc kubenswrapper[4899]: I1003 09:22:35.363613 4899 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Oct 03 09:22:35 crc kubenswrapper[4899]: I1003 09:22:35.363651 4899 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 09:22:35 crc kubenswrapper[4899]: I1003 09:22:35.363661 4899 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Oct 03 09:22:35 crc kubenswrapper[4899]: I1003 09:22:35.363671 4899 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 09:22:35 crc kubenswrapper[4899]: I1003 09:22:35.363680 4899 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 09:22:35 crc kubenswrapper[4899]: I1003 09:22:35.363688 4899 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/78460eaf-b283-4155-be7c-57230376bbcc-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Oct 03 09:22:35 crc kubenswrapper[4899]: I1003 09:22:35.363697 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mgr8k\" (UniqueName: \"kubernetes.io/projected/78460eaf-b283-4155-be7c-57230376bbcc-kube-api-access-mgr8k\") on node \"crc\" DevicePath \"\"" Oct 03 09:22:35 crc kubenswrapper[4899]: I1003 09:22:35.813838 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" event={"ID":"78460eaf-b283-4155-be7c-57230376bbcc","Type":"ContainerDied","Data":"a41af09a25ed5bb26ae00dcff3ce1f86bae4d28bcd2231187b27206a7ec39976"} Oct 03 09:22:35 crc kubenswrapper[4899]: I1003 09:22:35.813880 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a41af09a25ed5bb26ae00dcff3ce1f86bae4d28bcd2231187b27206a7ec39976" Oct 03 09:22:35 crc kubenswrapper[4899]: I1003 09:22:35.813958 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kw265" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.150838 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Oct 03 09:23:35 crc kubenswrapper[4899]: E1003 09:23:35.152089 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78460eaf-b283-4155-be7c-57230376bbcc" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.152112 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="78460eaf-b283-4155-be7c-57230376bbcc" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.152343 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="78460eaf-b283-4155-be7c-57230376bbcc" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.153166 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.155592 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-bfrgk" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.155714 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.156503 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.159961 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.161494 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.221909 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.221965 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-config-data\") pod \"tempest-tests-tempest\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.221992 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.324083 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " 
pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.324132 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-config-data\") pod \"tempest-tests-tempest\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.324160 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.324189 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.324229 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.324248 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"tempest-tests-tempest\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.324282 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.324313 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2ktc\" (UniqueName: \"kubernetes.io/projected/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-kube-api-access-j2ktc\") pod \"tempest-tests-tempest\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.324366 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.325470 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " 
pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.325882 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-config-data\") pod \"tempest-tests-tempest\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.333858 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.425582 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.425653 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.425675 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"tempest-tests-tempest\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.425708 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.425737 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2ktc\" (UniqueName: \"kubernetes.io/projected/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-kube-api-access-j2ktc\") pod \"tempest-tests-tempest\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.425792 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.426155 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.426244 4899 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.426472 4899 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"tempest-tests-tempest\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.432171 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.433149 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.446048 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2ktc\" (UniqueName: \"kubernetes.io/projected/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-kube-api-access-j2ktc\") pod \"tempest-tests-tempest\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.461363 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"tempest-tests-tempest\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.481702 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.919074 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Oct 03 09:23:35 crc kubenswrapper[4899]: I1003 09:23:35.925470 4899 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 09:23:36 crc kubenswrapper[4899]: I1003 09:23:36.379687 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"39b95e3c-c5c3-44c9-a89f-490bcde4fc69","Type":"ContainerStarted","Data":"5f6d8ef96959df8ce55d4e0f6f77a04d3f81255980ebddb28a87d99f9ae6ec56"} Oct 03 09:24:02 crc kubenswrapper[4899]: E1003 09:24:02.755812 4899 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Oct 03 09:24:02 crc kubenswrapper[4899]: E1003 09:24:02.756544 4899 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j2ktc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,Stdi
nOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(39b95e3c-c5c3-44c9-a89f-490bcde4fc69): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 03 09:24:02 crc kubenswrapper[4899]: E1003 09:24:02.757777 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="39b95e3c-c5c3-44c9-a89f-490bcde4fc69" Oct 03 09:24:03 crc kubenswrapper[4899]: E1003 09:24:03.644737 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="39b95e3c-c5c3-44c9-a89f-490bcde4fc69" Oct 03 09:24:18 crc kubenswrapper[4899]: I1003 09:24:18.770379 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"39b95e3c-c5c3-44c9-a89f-490bcde4fc69","Type":"ContainerStarted","Data":"beae13d63c6e4db84f8ae9a4303b9a7662fc6d81d777812ba3dcff45e2374eb9"} Oct 03 09:24:18 crc kubenswrapper[4899]: I1003 09:24:18.799168 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.783343349 podStartE2EDuration="44.799142596s" podCreationTimestamp="2025-10-03 09:23:34 +0000 UTC" firstStartedPulling="2025-10-03 09:23:35.925225045 +0000 UTC m=+2590.032709998" lastFinishedPulling="2025-10-03 09:24:16.941024292 +0000 UTC m=+2631.048509245" observedRunningTime="2025-10-03 09:24:18.792605351 +0000 UTC m=+2632.900090304" watchObservedRunningTime="2025-10-03 09:24:18.799142596 +0000 UTC m=+2632.906627559" Oct 03 09:24:42 crc kubenswrapper[4899]: I1003 09:24:42.198222 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 09:24:42 crc kubenswrapper[4899]: I1003 09:24:42.198752 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 09:25:07 crc kubenswrapper[4899]: I1003 09:25:07.274447 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-x4xgf"] Oct 03 09:25:07 crc kubenswrapper[4899]: I1003 09:25:07.281945 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-x4xgf" Oct 03 09:25:07 crc kubenswrapper[4899]: I1003 09:25:07.315272 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-x4xgf"] Oct 03 09:25:07 crc kubenswrapper[4899]: I1003 09:25:07.324087 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgvdr\" (UniqueName: \"kubernetes.io/projected/7091a075-501b-4f28-a415-96fa9bdfcf03-kube-api-access-rgvdr\") pod \"community-operators-x4xgf\" (UID: \"7091a075-501b-4f28-a415-96fa9bdfcf03\") " pod="openshift-marketplace/community-operators-x4xgf" Oct 03 09:25:07 crc kubenswrapper[4899]: I1003 09:25:07.324227 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7091a075-501b-4f28-a415-96fa9bdfcf03-catalog-content\") pod \"community-operators-x4xgf\" (UID: \"7091a075-501b-4f28-a415-96fa9bdfcf03\") " pod="openshift-marketplace/community-operators-x4xgf" Oct 03 09:25:07 crc kubenswrapper[4899]: I1003 09:25:07.324260 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7091a075-501b-4f28-a415-96fa9bdfcf03-utilities\") pod \"community-operators-x4xgf\" (UID: \"7091a075-501b-4f28-a415-96fa9bdfcf03\") " pod="openshift-marketplace/community-operators-x4xgf" Oct 03 09:25:07 crc kubenswrapper[4899]: I1003 09:25:07.426413 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgvdr\" (UniqueName: \"kubernetes.io/projected/7091a075-501b-4f28-a415-96fa9bdfcf03-kube-api-access-rgvdr\") pod \"community-operators-x4xgf\" (UID: \"7091a075-501b-4f28-a415-96fa9bdfcf03\") " pod="openshift-marketplace/community-operators-x4xgf" Oct 03 09:25:07 crc kubenswrapper[4899]: I1003 09:25:07.426787 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7091a075-501b-4f28-a415-96fa9bdfcf03-catalog-content\") pod \"community-operators-x4xgf\" (UID: \"7091a075-501b-4f28-a415-96fa9bdfcf03\") " pod="openshift-marketplace/community-operators-x4xgf" Oct 03 09:25:07 crc kubenswrapper[4899]: I1003 09:25:07.426819 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7091a075-501b-4f28-a415-96fa9bdfcf03-utilities\") pod \"community-operators-x4xgf\" (UID: \"7091a075-501b-4f28-a415-96fa9bdfcf03\") " pod="openshift-marketplace/community-operators-x4xgf" Oct 03 09:25:07 crc kubenswrapper[4899]: I1003 09:25:07.427271 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7091a075-501b-4f28-a415-96fa9bdfcf03-catalog-content\") pod \"community-operators-x4xgf\" (UID: \"7091a075-501b-4f28-a415-96fa9bdfcf03\") " pod="openshift-marketplace/community-operators-x4xgf" Oct 03 09:25:07 crc kubenswrapper[4899]: I1003 09:25:07.427373 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7091a075-501b-4f28-a415-96fa9bdfcf03-utilities\") pod \"community-operators-x4xgf\" (UID: \"7091a075-501b-4f28-a415-96fa9bdfcf03\") " pod="openshift-marketplace/community-operators-x4xgf" Oct 03 09:25:07 crc kubenswrapper[4899]: I1003 09:25:07.447019 4899 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rgvdr\" (UniqueName: \"kubernetes.io/projected/7091a075-501b-4f28-a415-96fa9bdfcf03-kube-api-access-rgvdr\") pod \"community-operators-x4xgf\" (UID: \"7091a075-501b-4f28-a415-96fa9bdfcf03\") " pod="openshift-marketplace/community-operators-x4xgf" Oct 03 09:25:07 crc kubenswrapper[4899]: I1003 09:25:07.633128 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x4xgf" Oct 03 09:25:08 crc kubenswrapper[4899]: I1003 09:25:08.143293 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-x4xgf"] Oct 03 09:25:08 crc kubenswrapper[4899]: W1003 09:25:08.153301 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7091a075_501b_4f28_a415_96fa9bdfcf03.slice/crio-ceee8e72c32d79445c2c02b878447cf51da396283aac06b51c521f0b473694c6 WatchSource:0}: Error finding container ceee8e72c32d79445c2c02b878447cf51da396283aac06b51c521f0b473694c6: Status 404 returned error can't find the container with id ceee8e72c32d79445c2c02b878447cf51da396283aac06b51c521f0b473694c6 Oct 03 09:25:08 crc kubenswrapper[4899]: I1003 09:25:08.241710 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x4xgf" event={"ID":"7091a075-501b-4f28-a415-96fa9bdfcf03","Type":"ContainerStarted","Data":"ceee8e72c32d79445c2c02b878447cf51da396283aac06b51c521f0b473694c6"} Oct 03 09:25:09 crc kubenswrapper[4899]: I1003 09:25:09.251815 4899 generic.go:334] "Generic (PLEG): container finished" podID="7091a075-501b-4f28-a415-96fa9bdfcf03" containerID="2b4881bdb8f9dcdded766d1d5675c7e904ca4db7f2b7537cf7bcaa8a649fa0f2" exitCode=0 Oct 03 09:25:09 crc kubenswrapper[4899]: I1003 09:25:09.251866 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x4xgf" event={"ID":"7091a075-501b-4f28-a415-96fa9bdfcf03","Type":"ContainerDied","Data":"2b4881bdb8f9dcdded766d1d5675c7e904ca4db7f2b7537cf7bcaa8a649fa0f2"} Oct 03 09:25:10 crc kubenswrapper[4899]: I1003 09:25:10.264390 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x4xgf" event={"ID":"7091a075-501b-4f28-a415-96fa9bdfcf03","Type":"ContainerStarted","Data":"8a11bb7b12938517df5d00afacbfe526f676eac7cde59e7eb1e01d3a086eab01"} Oct 03 09:25:11 crc kubenswrapper[4899]: I1003 09:25:11.274723 4899 generic.go:334] "Generic (PLEG): container finished" podID="7091a075-501b-4f28-a415-96fa9bdfcf03" containerID="8a11bb7b12938517df5d00afacbfe526f676eac7cde59e7eb1e01d3a086eab01" exitCode=0 Oct 03 09:25:11 crc kubenswrapper[4899]: I1003 09:25:11.274777 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x4xgf" event={"ID":"7091a075-501b-4f28-a415-96fa9bdfcf03","Type":"ContainerDied","Data":"8a11bb7b12938517df5d00afacbfe526f676eac7cde59e7eb1e01d3a086eab01"} Oct 03 09:25:12 crc kubenswrapper[4899]: I1003 09:25:12.198329 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 09:25:12 crc kubenswrapper[4899]: I1003 09:25:12.198672 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" 
podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 09:25:12 crc kubenswrapper[4899]: I1003 09:25:12.285529 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x4xgf" event={"ID":"7091a075-501b-4f28-a415-96fa9bdfcf03","Type":"ContainerStarted","Data":"2eef489d43bcc39ddaaa9983bdcc1019bb4aacbafc8cfa397d2c440f097ff73f"} Oct 03 09:25:12 crc kubenswrapper[4899]: I1003 09:25:12.302126 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-x4xgf" podStartSLOduration=2.807475051 podStartE2EDuration="5.302106342s" podCreationTimestamp="2025-10-03 09:25:07 +0000 UTC" firstStartedPulling="2025-10-03 09:25:09.254184314 +0000 UTC m=+2683.361669267" lastFinishedPulling="2025-10-03 09:25:11.748815605 +0000 UTC m=+2685.856300558" observedRunningTime="2025-10-03 09:25:12.301760381 +0000 UTC m=+2686.409245354" watchObservedRunningTime="2025-10-03 09:25:12.302106342 +0000 UTC m=+2686.409591295" Oct 03 09:25:17 crc kubenswrapper[4899]: I1003 09:25:17.633592 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-x4xgf" Oct 03 09:25:17 crc kubenswrapper[4899]: I1003 09:25:17.636974 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-x4xgf" Oct 03 09:25:17 crc kubenswrapper[4899]: I1003 09:25:17.682616 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-x4xgf" Oct 03 09:25:18 crc kubenswrapper[4899]: I1003 09:25:18.390245 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-x4xgf" Oct 03 09:25:18 crc kubenswrapper[4899]: I1003 09:25:18.918610 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-x4xgf"] Oct 03 09:25:20 crc kubenswrapper[4899]: I1003 09:25:20.355755 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-x4xgf" podUID="7091a075-501b-4f28-a415-96fa9bdfcf03" containerName="registry-server" containerID="cri-o://2eef489d43bcc39ddaaa9983bdcc1019bb4aacbafc8cfa397d2c440f097ff73f" gracePeriod=2 Oct 03 09:25:21 crc kubenswrapper[4899]: I1003 09:25:21.337718 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x4xgf" Oct 03 09:25:21 crc kubenswrapper[4899]: I1003 09:25:21.375590 4899 generic.go:334] "Generic (PLEG): container finished" podID="7091a075-501b-4f28-a415-96fa9bdfcf03" containerID="2eef489d43bcc39ddaaa9983bdcc1019bb4aacbafc8cfa397d2c440f097ff73f" exitCode=0 Oct 03 09:25:21 crc kubenswrapper[4899]: I1003 09:25:21.375643 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x4xgf" event={"ID":"7091a075-501b-4f28-a415-96fa9bdfcf03","Type":"ContainerDied","Data":"2eef489d43bcc39ddaaa9983bdcc1019bb4aacbafc8cfa397d2c440f097ff73f"} Oct 03 09:25:21 crc kubenswrapper[4899]: I1003 09:25:21.375672 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-x4xgf" Oct 03 09:25:21 crc kubenswrapper[4899]: I1003 09:25:21.375678 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x4xgf" event={"ID":"7091a075-501b-4f28-a415-96fa9bdfcf03","Type":"ContainerDied","Data":"ceee8e72c32d79445c2c02b878447cf51da396283aac06b51c521f0b473694c6"} Oct 03 09:25:21 crc kubenswrapper[4899]: I1003 09:25:21.375695 4899 scope.go:117] "RemoveContainer" containerID="2eef489d43bcc39ddaaa9983bdcc1019bb4aacbafc8cfa397d2c440f097ff73f" Oct 03 09:25:21 crc kubenswrapper[4899]: I1003 09:25:21.394948 4899 scope.go:117] "RemoveContainer" containerID="8a11bb7b12938517df5d00afacbfe526f676eac7cde59e7eb1e01d3a086eab01" Oct 03 09:25:21 crc kubenswrapper[4899]: I1003 09:25:21.402105 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7091a075-501b-4f28-a415-96fa9bdfcf03-utilities\") pod \"7091a075-501b-4f28-a415-96fa9bdfcf03\" (UID: \"7091a075-501b-4f28-a415-96fa9bdfcf03\") " Oct 03 09:25:21 crc kubenswrapper[4899]: I1003 09:25:21.402198 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7091a075-501b-4f28-a415-96fa9bdfcf03-catalog-content\") pod \"7091a075-501b-4f28-a415-96fa9bdfcf03\" (UID: \"7091a075-501b-4f28-a415-96fa9bdfcf03\") " Oct 03 09:25:21 crc kubenswrapper[4899]: I1003 09:25:21.402327 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgvdr\" (UniqueName: \"kubernetes.io/projected/7091a075-501b-4f28-a415-96fa9bdfcf03-kube-api-access-rgvdr\") pod \"7091a075-501b-4f28-a415-96fa9bdfcf03\" (UID: \"7091a075-501b-4f28-a415-96fa9bdfcf03\") " Oct 03 09:25:21 crc kubenswrapper[4899]: I1003 09:25:21.402949 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7091a075-501b-4f28-a415-96fa9bdfcf03-utilities" (OuterVolumeSpecName: "utilities") pod "7091a075-501b-4f28-a415-96fa9bdfcf03" (UID: "7091a075-501b-4f28-a415-96fa9bdfcf03"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:25:21 crc kubenswrapper[4899]: I1003 09:25:21.408481 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7091a075-501b-4f28-a415-96fa9bdfcf03-kube-api-access-rgvdr" (OuterVolumeSpecName: "kube-api-access-rgvdr") pod "7091a075-501b-4f28-a415-96fa9bdfcf03" (UID: "7091a075-501b-4f28-a415-96fa9bdfcf03"). InnerVolumeSpecName "kube-api-access-rgvdr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:25:21 crc kubenswrapper[4899]: I1003 09:25:21.416147 4899 scope.go:117] "RemoveContainer" containerID="2b4881bdb8f9dcdded766d1d5675c7e904ca4db7f2b7537cf7bcaa8a649fa0f2" Oct 03 09:25:21 crc kubenswrapper[4899]: I1003 09:25:21.449846 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7091a075-501b-4f28-a415-96fa9bdfcf03-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7091a075-501b-4f28-a415-96fa9bdfcf03" (UID: "7091a075-501b-4f28-a415-96fa9bdfcf03"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:25:21 crc kubenswrapper[4899]: I1003 09:25:21.504401 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7091a075-501b-4f28-a415-96fa9bdfcf03-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 09:25:21 crc kubenswrapper[4899]: I1003 09:25:21.504440 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7091a075-501b-4f28-a415-96fa9bdfcf03-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 09:25:21 crc kubenswrapper[4899]: I1003 09:25:21.504455 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgvdr\" (UniqueName: \"kubernetes.io/projected/7091a075-501b-4f28-a415-96fa9bdfcf03-kube-api-access-rgvdr\") on node \"crc\" DevicePath \"\"" Oct 03 09:25:21 crc kubenswrapper[4899]: I1003 09:25:21.510938 4899 scope.go:117] "RemoveContainer" containerID="2eef489d43bcc39ddaaa9983bdcc1019bb4aacbafc8cfa397d2c440f097ff73f" Oct 03 09:25:21 crc kubenswrapper[4899]: E1003 09:25:21.511350 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2eef489d43bcc39ddaaa9983bdcc1019bb4aacbafc8cfa397d2c440f097ff73f\": container with ID starting with 2eef489d43bcc39ddaaa9983bdcc1019bb4aacbafc8cfa397d2c440f097ff73f not found: ID does not exist" containerID="2eef489d43bcc39ddaaa9983bdcc1019bb4aacbafc8cfa397d2c440f097ff73f" Oct 03 09:25:21 crc kubenswrapper[4899]: I1003 09:25:21.511379 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2eef489d43bcc39ddaaa9983bdcc1019bb4aacbafc8cfa397d2c440f097ff73f"} err="failed to get container status \"2eef489d43bcc39ddaaa9983bdcc1019bb4aacbafc8cfa397d2c440f097ff73f\": rpc error: code = NotFound desc = could not find container \"2eef489d43bcc39ddaaa9983bdcc1019bb4aacbafc8cfa397d2c440f097ff73f\": container with ID starting with 2eef489d43bcc39ddaaa9983bdcc1019bb4aacbafc8cfa397d2c440f097ff73f not found: ID does not exist" Oct 03 09:25:21 crc kubenswrapper[4899]: I1003 09:25:21.511399 4899 scope.go:117] "RemoveContainer" containerID="8a11bb7b12938517df5d00afacbfe526f676eac7cde59e7eb1e01d3a086eab01" Oct 03 09:25:21 crc kubenswrapper[4899]: E1003 09:25:21.511697 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a11bb7b12938517df5d00afacbfe526f676eac7cde59e7eb1e01d3a086eab01\": container with ID starting with 8a11bb7b12938517df5d00afacbfe526f676eac7cde59e7eb1e01d3a086eab01 not found: ID does not exist" containerID="8a11bb7b12938517df5d00afacbfe526f676eac7cde59e7eb1e01d3a086eab01" Oct 03 09:25:21 crc kubenswrapper[4899]: I1003 09:25:21.511745 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a11bb7b12938517df5d00afacbfe526f676eac7cde59e7eb1e01d3a086eab01"} err="failed to get container status \"8a11bb7b12938517df5d00afacbfe526f676eac7cde59e7eb1e01d3a086eab01\": rpc error: code = NotFound desc = could not find container \"8a11bb7b12938517df5d00afacbfe526f676eac7cde59e7eb1e01d3a086eab01\": container with ID starting with 8a11bb7b12938517df5d00afacbfe526f676eac7cde59e7eb1e01d3a086eab01 not found: ID does not exist" Oct 03 09:25:21 crc kubenswrapper[4899]: I1003 09:25:21.511771 4899 scope.go:117] "RemoveContainer" containerID="2b4881bdb8f9dcdded766d1d5675c7e904ca4db7f2b7537cf7bcaa8a649fa0f2" Oct 03 09:25:21 crc 
kubenswrapper[4899]: E1003 09:25:21.512309 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b4881bdb8f9dcdded766d1d5675c7e904ca4db7f2b7537cf7bcaa8a649fa0f2\": container with ID starting with 2b4881bdb8f9dcdded766d1d5675c7e904ca4db7f2b7537cf7bcaa8a649fa0f2 not found: ID does not exist" containerID="2b4881bdb8f9dcdded766d1d5675c7e904ca4db7f2b7537cf7bcaa8a649fa0f2" Oct 03 09:25:21 crc kubenswrapper[4899]: I1003 09:25:21.512346 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b4881bdb8f9dcdded766d1d5675c7e904ca4db7f2b7537cf7bcaa8a649fa0f2"} err="failed to get container status \"2b4881bdb8f9dcdded766d1d5675c7e904ca4db7f2b7537cf7bcaa8a649fa0f2\": rpc error: code = NotFound desc = could not find container \"2b4881bdb8f9dcdded766d1d5675c7e904ca4db7f2b7537cf7bcaa8a649fa0f2\": container with ID starting with 2b4881bdb8f9dcdded766d1d5675c7e904ca4db7f2b7537cf7bcaa8a649fa0f2 not found: ID does not exist" Oct 03 09:25:21 crc kubenswrapper[4899]: I1003 09:25:21.711803 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-x4xgf"] Oct 03 09:25:21 crc kubenswrapper[4899]: I1003 09:25:21.720022 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-x4xgf"] Oct 03 09:25:22 crc kubenswrapper[4899]: I1003 09:25:22.536995 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7091a075-501b-4f28-a415-96fa9bdfcf03" path="/var/lib/kubelet/pods/7091a075-501b-4f28-a415-96fa9bdfcf03/volumes" Oct 03 09:25:42 crc kubenswrapper[4899]: I1003 09:25:42.197819 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 09:25:42 crc kubenswrapper[4899]: I1003 09:25:42.198323 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 09:25:42 crc kubenswrapper[4899]: I1003 09:25:42.198374 4899 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 09:25:42 crc kubenswrapper[4899]: I1003 09:25:42.199093 4899 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d5dd56986e2dd2f071f94a8855f6a4fbd34efa104bfcd4d459ad9f6f9563b980"} pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 09:25:42 crc kubenswrapper[4899]: I1003 09:25:42.199138 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" containerID="cri-o://d5dd56986e2dd2f071f94a8855f6a4fbd34efa104bfcd4d459ad9f6f9563b980" gracePeriod=600 Oct 03 09:25:42 crc kubenswrapper[4899]: I1003 09:25:42.578842 4899 generic.go:334] "Generic (PLEG): container finished" 
podID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerID="d5dd56986e2dd2f071f94a8855f6a4fbd34efa104bfcd4d459ad9f6f9563b980" exitCode=0 Oct 03 09:25:42 crc kubenswrapper[4899]: I1003 09:25:42.578955 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerDied","Data":"d5dd56986e2dd2f071f94a8855f6a4fbd34efa104bfcd4d459ad9f6f9563b980"} Oct 03 09:25:42 crc kubenswrapper[4899]: I1003 09:25:42.579375 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerStarted","Data":"f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193"} Oct 03 09:25:42 crc kubenswrapper[4899]: I1003 09:25:42.579397 4899 scope.go:117] "RemoveContainer" containerID="ed54921bdc2336c96378284f4d6d47fbfc2a2004c934f824cdf10019c1549f27" Oct 03 09:25:49 crc kubenswrapper[4899]: I1003 09:25:49.372331 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-chp65"] Oct 03 09:25:49 crc kubenswrapper[4899]: E1003 09:25:49.373660 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7091a075-501b-4f28-a415-96fa9bdfcf03" containerName="extract-utilities" Oct 03 09:25:49 crc kubenswrapper[4899]: I1003 09:25:49.373678 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="7091a075-501b-4f28-a415-96fa9bdfcf03" containerName="extract-utilities" Oct 03 09:25:49 crc kubenswrapper[4899]: E1003 09:25:49.373707 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7091a075-501b-4f28-a415-96fa9bdfcf03" containerName="extract-content" Oct 03 09:25:49 crc kubenswrapper[4899]: I1003 09:25:49.373714 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="7091a075-501b-4f28-a415-96fa9bdfcf03" containerName="extract-content" Oct 03 09:25:49 crc kubenswrapper[4899]: E1003 09:25:49.373734 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7091a075-501b-4f28-a415-96fa9bdfcf03" containerName="registry-server" Oct 03 09:25:49 crc kubenswrapper[4899]: I1003 09:25:49.373740 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="7091a075-501b-4f28-a415-96fa9bdfcf03" containerName="registry-server" Oct 03 09:25:49 crc kubenswrapper[4899]: I1003 09:25:49.373963 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="7091a075-501b-4f28-a415-96fa9bdfcf03" containerName="registry-server" Oct 03 09:25:49 crc kubenswrapper[4899]: I1003 09:25:49.375901 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-chp65" Oct 03 09:25:49 crc kubenswrapper[4899]: I1003 09:25:49.404646 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-chp65"] Oct 03 09:25:49 crc kubenswrapper[4899]: I1003 09:25:49.481378 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khqwj\" (UniqueName: \"kubernetes.io/projected/4b7ef813-2cf3-4b4a-abda-ed0d321f5743-kube-api-access-khqwj\") pod \"certified-operators-chp65\" (UID: \"4b7ef813-2cf3-4b4a-abda-ed0d321f5743\") " pod="openshift-marketplace/certified-operators-chp65" Oct 03 09:25:49 crc kubenswrapper[4899]: I1003 09:25:49.481701 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b7ef813-2cf3-4b4a-abda-ed0d321f5743-utilities\") pod \"certified-operators-chp65\" (UID: \"4b7ef813-2cf3-4b4a-abda-ed0d321f5743\") " pod="openshift-marketplace/certified-operators-chp65" Oct 03 09:25:49 crc kubenswrapper[4899]: I1003 09:25:49.481779 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b7ef813-2cf3-4b4a-abda-ed0d321f5743-catalog-content\") pod \"certified-operators-chp65\" (UID: \"4b7ef813-2cf3-4b4a-abda-ed0d321f5743\") " pod="openshift-marketplace/certified-operators-chp65" Oct 03 09:25:49 crc kubenswrapper[4899]: I1003 09:25:49.584504 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khqwj\" (UniqueName: \"kubernetes.io/projected/4b7ef813-2cf3-4b4a-abda-ed0d321f5743-kube-api-access-khqwj\") pod \"certified-operators-chp65\" (UID: \"4b7ef813-2cf3-4b4a-abda-ed0d321f5743\") " pod="openshift-marketplace/certified-operators-chp65" Oct 03 09:25:49 crc kubenswrapper[4899]: I1003 09:25:49.584651 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b7ef813-2cf3-4b4a-abda-ed0d321f5743-utilities\") pod \"certified-operators-chp65\" (UID: \"4b7ef813-2cf3-4b4a-abda-ed0d321f5743\") " pod="openshift-marketplace/certified-operators-chp65" Oct 03 09:25:49 crc kubenswrapper[4899]: I1003 09:25:49.584722 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b7ef813-2cf3-4b4a-abda-ed0d321f5743-catalog-content\") pod \"certified-operators-chp65\" (UID: \"4b7ef813-2cf3-4b4a-abda-ed0d321f5743\") " pod="openshift-marketplace/certified-operators-chp65" Oct 03 09:25:49 crc kubenswrapper[4899]: I1003 09:25:49.585523 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b7ef813-2cf3-4b4a-abda-ed0d321f5743-catalog-content\") pod \"certified-operators-chp65\" (UID: \"4b7ef813-2cf3-4b4a-abda-ed0d321f5743\") " pod="openshift-marketplace/certified-operators-chp65" Oct 03 09:25:49 crc kubenswrapper[4899]: I1003 09:25:49.585572 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b7ef813-2cf3-4b4a-abda-ed0d321f5743-utilities\") pod \"certified-operators-chp65\" (UID: \"4b7ef813-2cf3-4b4a-abda-ed0d321f5743\") " pod="openshift-marketplace/certified-operators-chp65" Oct 03 09:25:49 crc kubenswrapper[4899]: I1003 09:25:49.604387 4899 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-khqwj\" (UniqueName: \"kubernetes.io/projected/4b7ef813-2cf3-4b4a-abda-ed0d321f5743-kube-api-access-khqwj\") pod \"certified-operators-chp65\" (UID: \"4b7ef813-2cf3-4b4a-abda-ed0d321f5743\") " pod="openshift-marketplace/certified-operators-chp65" Oct 03 09:25:49 crc kubenswrapper[4899]: I1003 09:25:49.700800 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-chp65" Oct 03 09:25:50 crc kubenswrapper[4899]: I1003 09:25:50.230960 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-chp65"] Oct 03 09:25:50 crc kubenswrapper[4899]: I1003 09:25:50.656552 4899 generic.go:334] "Generic (PLEG): container finished" podID="4b7ef813-2cf3-4b4a-abda-ed0d321f5743" containerID="a6133522e2dd050d0d006577df0443636f15b0bd7cb0f1194aba1463480436c2" exitCode=0 Oct 03 09:25:50 crc kubenswrapper[4899]: I1003 09:25:50.656694 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-chp65" event={"ID":"4b7ef813-2cf3-4b4a-abda-ed0d321f5743","Type":"ContainerDied","Data":"a6133522e2dd050d0d006577df0443636f15b0bd7cb0f1194aba1463480436c2"} Oct 03 09:25:50 crc kubenswrapper[4899]: I1003 09:25:50.656870 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-chp65" event={"ID":"4b7ef813-2cf3-4b4a-abda-ed0d321f5743","Type":"ContainerStarted","Data":"2e9c440759b6628b7e29f91acffd64265c890e782d7ce5aea559de107694b8b7"} Oct 03 09:25:51 crc kubenswrapper[4899]: I1003 09:25:51.668378 4899 generic.go:334] "Generic (PLEG): container finished" podID="4b7ef813-2cf3-4b4a-abda-ed0d321f5743" containerID="fad830f0a7ebfcbdae30ebee5be9c2fb7e8f893c8bafe937a452237543886f23" exitCode=0 Oct 03 09:25:51 crc kubenswrapper[4899]: I1003 09:25:51.668504 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-chp65" event={"ID":"4b7ef813-2cf3-4b4a-abda-ed0d321f5743","Type":"ContainerDied","Data":"fad830f0a7ebfcbdae30ebee5be9c2fb7e8f893c8bafe937a452237543886f23"} Oct 03 09:25:52 crc kubenswrapper[4899]: I1003 09:25:52.680576 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-chp65" event={"ID":"4b7ef813-2cf3-4b4a-abda-ed0d321f5743","Type":"ContainerStarted","Data":"d09446ddf50b0f67ec3d71a63776ae4bc99ca333fc3ddd788ab0c05d8e5bdec9"} Oct 03 09:25:52 crc kubenswrapper[4899]: I1003 09:25:52.704405 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-chp65" podStartSLOduration=2.257073816 podStartE2EDuration="3.704387403s" podCreationTimestamp="2025-10-03 09:25:49 +0000 UTC" firstStartedPulling="2025-10-03 09:25:50.658230359 +0000 UTC m=+2724.765715312" lastFinishedPulling="2025-10-03 09:25:52.105543946 +0000 UTC m=+2726.213028899" observedRunningTime="2025-10-03 09:25:52.69796762 +0000 UTC m=+2726.805452573" watchObservedRunningTime="2025-10-03 09:25:52.704387403 +0000 UTC m=+2726.811872356" Oct 03 09:25:59 crc kubenswrapper[4899]: I1003 09:25:59.701033 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-chp65" Oct 03 09:25:59 crc kubenswrapper[4899]: I1003 09:25:59.703422 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-chp65" Oct 03 09:25:59 crc kubenswrapper[4899]: I1003 09:25:59.765172 4899 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-chp65" Oct 03 09:26:00 crc kubenswrapper[4899]: I1003 09:26:00.788332 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-chp65" Oct 03 09:26:00 crc kubenswrapper[4899]: I1003 09:26:00.834536 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-chp65"] Oct 03 09:26:02 crc kubenswrapper[4899]: I1003 09:26:02.763372 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-chp65" podUID="4b7ef813-2cf3-4b4a-abda-ed0d321f5743" containerName="registry-server" containerID="cri-o://d09446ddf50b0f67ec3d71a63776ae4bc99ca333fc3ddd788ab0c05d8e5bdec9" gracePeriod=2 Oct 03 09:26:03 crc kubenswrapper[4899]: I1003 09:26:03.269711 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-chp65" Oct 03 09:26:03 crc kubenswrapper[4899]: I1003 09:26:03.353142 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b7ef813-2cf3-4b4a-abda-ed0d321f5743-utilities\") pod \"4b7ef813-2cf3-4b4a-abda-ed0d321f5743\" (UID: \"4b7ef813-2cf3-4b4a-abda-ed0d321f5743\") " Oct 03 09:26:03 crc kubenswrapper[4899]: I1003 09:26:03.353346 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b7ef813-2cf3-4b4a-abda-ed0d321f5743-catalog-content\") pod \"4b7ef813-2cf3-4b4a-abda-ed0d321f5743\" (UID: \"4b7ef813-2cf3-4b4a-abda-ed0d321f5743\") " Oct 03 09:26:03 crc kubenswrapper[4899]: I1003 09:26:03.353431 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-khqwj\" (UniqueName: \"kubernetes.io/projected/4b7ef813-2cf3-4b4a-abda-ed0d321f5743-kube-api-access-khqwj\") pod \"4b7ef813-2cf3-4b4a-abda-ed0d321f5743\" (UID: \"4b7ef813-2cf3-4b4a-abda-ed0d321f5743\") " Oct 03 09:26:03 crc kubenswrapper[4899]: I1003 09:26:03.354650 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b7ef813-2cf3-4b4a-abda-ed0d321f5743-utilities" (OuterVolumeSpecName: "utilities") pod "4b7ef813-2cf3-4b4a-abda-ed0d321f5743" (UID: "4b7ef813-2cf3-4b4a-abda-ed0d321f5743"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:26:03 crc kubenswrapper[4899]: I1003 09:26:03.361022 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b7ef813-2cf3-4b4a-abda-ed0d321f5743-kube-api-access-khqwj" (OuterVolumeSpecName: "kube-api-access-khqwj") pod "4b7ef813-2cf3-4b4a-abda-ed0d321f5743" (UID: "4b7ef813-2cf3-4b4a-abda-ed0d321f5743"). InnerVolumeSpecName "kube-api-access-khqwj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:26:03 crc kubenswrapper[4899]: I1003 09:26:03.396380 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b7ef813-2cf3-4b4a-abda-ed0d321f5743-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4b7ef813-2cf3-4b4a-abda-ed0d321f5743" (UID: "4b7ef813-2cf3-4b4a-abda-ed0d321f5743"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:26:03 crc kubenswrapper[4899]: I1003 09:26:03.457216 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b7ef813-2cf3-4b4a-abda-ed0d321f5743-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 09:26:03 crc kubenswrapper[4899]: I1003 09:26:03.457256 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b7ef813-2cf3-4b4a-abda-ed0d321f5743-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 09:26:03 crc kubenswrapper[4899]: I1003 09:26:03.457269 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-khqwj\" (UniqueName: \"kubernetes.io/projected/4b7ef813-2cf3-4b4a-abda-ed0d321f5743-kube-api-access-khqwj\") on node \"crc\" DevicePath \"\"" Oct 03 09:26:03 crc kubenswrapper[4899]: I1003 09:26:03.775596 4899 generic.go:334] "Generic (PLEG): container finished" podID="4b7ef813-2cf3-4b4a-abda-ed0d321f5743" containerID="d09446ddf50b0f67ec3d71a63776ae4bc99ca333fc3ddd788ab0c05d8e5bdec9" exitCode=0 Oct 03 09:26:03 crc kubenswrapper[4899]: I1003 09:26:03.775664 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-chp65" event={"ID":"4b7ef813-2cf3-4b4a-abda-ed0d321f5743","Type":"ContainerDied","Data":"d09446ddf50b0f67ec3d71a63776ae4bc99ca333fc3ddd788ab0c05d8e5bdec9"} Oct 03 09:26:03 crc kubenswrapper[4899]: I1003 09:26:03.775707 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-chp65" event={"ID":"4b7ef813-2cf3-4b4a-abda-ed0d321f5743","Type":"ContainerDied","Data":"2e9c440759b6628b7e29f91acffd64265c890e782d7ce5aea559de107694b8b7"} Oct 03 09:26:03 crc kubenswrapper[4899]: I1003 09:26:03.775733 4899 scope.go:117] "RemoveContainer" containerID="d09446ddf50b0f67ec3d71a63776ae4bc99ca333fc3ddd788ab0c05d8e5bdec9" Oct 03 09:26:03 crc kubenswrapper[4899]: I1003 09:26:03.775801 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-chp65" Oct 03 09:26:03 crc kubenswrapper[4899]: I1003 09:26:03.817073 4899 scope.go:117] "RemoveContainer" containerID="fad830f0a7ebfcbdae30ebee5be9c2fb7e8f893c8bafe937a452237543886f23" Oct 03 09:26:03 crc kubenswrapper[4899]: I1003 09:26:03.825955 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-chp65"] Oct 03 09:26:03 crc kubenswrapper[4899]: I1003 09:26:03.834770 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-chp65"] Oct 03 09:26:03 crc kubenswrapper[4899]: I1003 09:26:03.859818 4899 scope.go:117] "RemoveContainer" containerID="a6133522e2dd050d0d006577df0443636f15b0bd7cb0f1194aba1463480436c2" Oct 03 09:26:03 crc kubenswrapper[4899]: I1003 09:26:03.902512 4899 scope.go:117] "RemoveContainer" containerID="d09446ddf50b0f67ec3d71a63776ae4bc99ca333fc3ddd788ab0c05d8e5bdec9" Oct 03 09:26:03 crc kubenswrapper[4899]: E1003 09:26:03.904030 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d09446ddf50b0f67ec3d71a63776ae4bc99ca333fc3ddd788ab0c05d8e5bdec9\": container with ID starting with d09446ddf50b0f67ec3d71a63776ae4bc99ca333fc3ddd788ab0c05d8e5bdec9 not found: ID does not exist" containerID="d09446ddf50b0f67ec3d71a63776ae4bc99ca333fc3ddd788ab0c05d8e5bdec9" Oct 03 09:26:03 crc kubenswrapper[4899]: I1003 09:26:03.904101 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d09446ddf50b0f67ec3d71a63776ae4bc99ca333fc3ddd788ab0c05d8e5bdec9"} err="failed to get container status \"d09446ddf50b0f67ec3d71a63776ae4bc99ca333fc3ddd788ab0c05d8e5bdec9\": rpc error: code = NotFound desc = could not find container \"d09446ddf50b0f67ec3d71a63776ae4bc99ca333fc3ddd788ab0c05d8e5bdec9\": container with ID starting with d09446ddf50b0f67ec3d71a63776ae4bc99ca333fc3ddd788ab0c05d8e5bdec9 not found: ID does not exist" Oct 03 09:26:03 crc kubenswrapper[4899]: I1003 09:26:03.904126 4899 scope.go:117] "RemoveContainer" containerID="fad830f0a7ebfcbdae30ebee5be9c2fb7e8f893c8bafe937a452237543886f23" Oct 03 09:26:03 crc kubenswrapper[4899]: E1003 09:26:03.904424 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fad830f0a7ebfcbdae30ebee5be9c2fb7e8f893c8bafe937a452237543886f23\": container with ID starting with fad830f0a7ebfcbdae30ebee5be9c2fb7e8f893c8bafe937a452237543886f23 not found: ID does not exist" containerID="fad830f0a7ebfcbdae30ebee5be9c2fb7e8f893c8bafe937a452237543886f23" Oct 03 09:26:03 crc kubenswrapper[4899]: I1003 09:26:03.904459 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fad830f0a7ebfcbdae30ebee5be9c2fb7e8f893c8bafe937a452237543886f23"} err="failed to get container status \"fad830f0a7ebfcbdae30ebee5be9c2fb7e8f893c8bafe937a452237543886f23\": rpc error: code = NotFound desc = could not find container \"fad830f0a7ebfcbdae30ebee5be9c2fb7e8f893c8bafe937a452237543886f23\": container with ID starting with fad830f0a7ebfcbdae30ebee5be9c2fb7e8f893c8bafe937a452237543886f23 not found: ID does not exist" Oct 03 09:26:03 crc kubenswrapper[4899]: I1003 09:26:03.904486 4899 scope.go:117] "RemoveContainer" containerID="a6133522e2dd050d0d006577df0443636f15b0bd7cb0f1194aba1463480436c2" Oct 03 09:26:03 crc kubenswrapper[4899]: E1003 09:26:03.904737 4899 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a6133522e2dd050d0d006577df0443636f15b0bd7cb0f1194aba1463480436c2\": container with ID starting with a6133522e2dd050d0d006577df0443636f15b0bd7cb0f1194aba1463480436c2 not found: ID does not exist" containerID="a6133522e2dd050d0d006577df0443636f15b0bd7cb0f1194aba1463480436c2" Oct 03 09:26:03 crc kubenswrapper[4899]: I1003 09:26:03.904767 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6133522e2dd050d0d006577df0443636f15b0bd7cb0f1194aba1463480436c2"} err="failed to get container status \"a6133522e2dd050d0d006577df0443636f15b0bd7cb0f1194aba1463480436c2\": rpc error: code = NotFound desc = could not find container \"a6133522e2dd050d0d006577df0443636f15b0bd7cb0f1194aba1463480436c2\": container with ID starting with a6133522e2dd050d0d006577df0443636f15b0bd7cb0f1194aba1463480436c2 not found: ID does not exist" Oct 03 09:26:04 crc kubenswrapper[4899]: I1003 09:26:04.537711 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b7ef813-2cf3-4b4a-abda-ed0d321f5743" path="/var/lib/kubelet/pods/4b7ef813-2cf3-4b4a-abda-ed0d321f5743/volumes" Oct 03 09:26:08 crc kubenswrapper[4899]: I1003 09:26:08.941225 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9tfwd"] Oct 03 09:26:08 crc kubenswrapper[4899]: E1003 09:26:08.942018 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b7ef813-2cf3-4b4a-abda-ed0d321f5743" containerName="extract-utilities" Oct 03 09:26:08 crc kubenswrapper[4899]: I1003 09:26:08.942033 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b7ef813-2cf3-4b4a-abda-ed0d321f5743" containerName="extract-utilities" Oct 03 09:26:08 crc kubenswrapper[4899]: E1003 09:26:08.942052 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b7ef813-2cf3-4b4a-abda-ed0d321f5743" containerName="registry-server" Oct 03 09:26:08 crc kubenswrapper[4899]: I1003 09:26:08.942058 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b7ef813-2cf3-4b4a-abda-ed0d321f5743" containerName="registry-server" Oct 03 09:26:08 crc kubenswrapper[4899]: E1003 09:26:08.942089 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b7ef813-2cf3-4b4a-abda-ed0d321f5743" containerName="extract-content" Oct 03 09:26:08 crc kubenswrapper[4899]: I1003 09:26:08.942095 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b7ef813-2cf3-4b4a-abda-ed0d321f5743" containerName="extract-content" Oct 03 09:26:08 crc kubenswrapper[4899]: I1003 09:26:08.942278 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b7ef813-2cf3-4b4a-abda-ed0d321f5743" containerName="registry-server" Oct 03 09:26:08 crc kubenswrapper[4899]: I1003 09:26:08.943635 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9tfwd" Oct 03 09:26:08 crc kubenswrapper[4899]: I1003 09:26:08.956884 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9tfwd"] Oct 03 09:26:08 crc kubenswrapper[4899]: I1003 09:26:08.968360 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b4ce89b-13a8-4114-9078-74feb9b1e145-catalog-content\") pod \"redhat-operators-9tfwd\" (UID: \"1b4ce89b-13a8-4114-9078-74feb9b1e145\") " pod="openshift-marketplace/redhat-operators-9tfwd" Oct 03 09:26:08 crc kubenswrapper[4899]: I1003 09:26:08.968445 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b4ce89b-13a8-4114-9078-74feb9b1e145-utilities\") pod \"redhat-operators-9tfwd\" (UID: \"1b4ce89b-13a8-4114-9078-74feb9b1e145\") " pod="openshift-marketplace/redhat-operators-9tfwd" Oct 03 09:26:08 crc kubenswrapper[4899]: I1003 09:26:08.968515 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ln4sz\" (UniqueName: \"kubernetes.io/projected/1b4ce89b-13a8-4114-9078-74feb9b1e145-kube-api-access-ln4sz\") pod \"redhat-operators-9tfwd\" (UID: \"1b4ce89b-13a8-4114-9078-74feb9b1e145\") " pod="openshift-marketplace/redhat-operators-9tfwd" Oct 03 09:26:09 crc kubenswrapper[4899]: I1003 09:26:09.070979 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ln4sz\" (UniqueName: \"kubernetes.io/projected/1b4ce89b-13a8-4114-9078-74feb9b1e145-kube-api-access-ln4sz\") pod \"redhat-operators-9tfwd\" (UID: \"1b4ce89b-13a8-4114-9078-74feb9b1e145\") " pod="openshift-marketplace/redhat-operators-9tfwd" Oct 03 09:26:09 crc kubenswrapper[4899]: I1003 09:26:09.071332 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b4ce89b-13a8-4114-9078-74feb9b1e145-catalog-content\") pod \"redhat-operators-9tfwd\" (UID: \"1b4ce89b-13a8-4114-9078-74feb9b1e145\") " pod="openshift-marketplace/redhat-operators-9tfwd" Oct 03 09:26:09 crc kubenswrapper[4899]: I1003 09:26:09.071520 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b4ce89b-13a8-4114-9078-74feb9b1e145-utilities\") pod \"redhat-operators-9tfwd\" (UID: \"1b4ce89b-13a8-4114-9078-74feb9b1e145\") " pod="openshift-marketplace/redhat-operators-9tfwd" Oct 03 09:26:09 crc kubenswrapper[4899]: I1003 09:26:09.072085 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b4ce89b-13a8-4114-9078-74feb9b1e145-utilities\") pod \"redhat-operators-9tfwd\" (UID: \"1b4ce89b-13a8-4114-9078-74feb9b1e145\") " pod="openshift-marketplace/redhat-operators-9tfwd" Oct 03 09:26:09 crc kubenswrapper[4899]: I1003 09:26:09.072291 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b4ce89b-13a8-4114-9078-74feb9b1e145-catalog-content\") pod \"redhat-operators-9tfwd\" (UID: \"1b4ce89b-13a8-4114-9078-74feb9b1e145\") " pod="openshift-marketplace/redhat-operators-9tfwd" Oct 03 09:26:09 crc kubenswrapper[4899]: I1003 09:26:09.100751 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-ln4sz\" (UniqueName: \"kubernetes.io/projected/1b4ce89b-13a8-4114-9078-74feb9b1e145-kube-api-access-ln4sz\") pod \"redhat-operators-9tfwd\" (UID: \"1b4ce89b-13a8-4114-9078-74feb9b1e145\") " pod="openshift-marketplace/redhat-operators-9tfwd" Oct 03 09:26:09 crc kubenswrapper[4899]: I1003 09:26:09.272970 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9tfwd" Oct 03 09:26:09 crc kubenswrapper[4899]: I1003 09:26:09.742559 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9tfwd"] Oct 03 09:26:09 crc kubenswrapper[4899]: I1003 09:26:09.835553 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9tfwd" event={"ID":"1b4ce89b-13a8-4114-9078-74feb9b1e145","Type":"ContainerStarted","Data":"f1992a03c2101a1707d25a66287e3676319596b845d708d9df0761ea7235d4af"} Oct 03 09:26:10 crc kubenswrapper[4899]: I1003 09:26:10.845752 4899 generic.go:334] "Generic (PLEG): container finished" podID="1b4ce89b-13a8-4114-9078-74feb9b1e145" containerID="cbc79144269c36754e071d43a63858cd7a94315c8a45fe850410057964a7251e" exitCode=0 Oct 03 09:26:10 crc kubenswrapper[4899]: I1003 09:26:10.845875 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9tfwd" event={"ID":"1b4ce89b-13a8-4114-9078-74feb9b1e145","Type":"ContainerDied","Data":"cbc79144269c36754e071d43a63858cd7a94315c8a45fe850410057964a7251e"} Oct 03 09:26:11 crc kubenswrapper[4899]: I1003 09:26:11.856537 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9tfwd" event={"ID":"1b4ce89b-13a8-4114-9078-74feb9b1e145","Type":"ContainerStarted","Data":"71a88c6515db4bc0c3eb814df9d1fb4e276313a98381347046b329f9fa0723ea"} Oct 03 09:26:13 crc kubenswrapper[4899]: I1003 09:26:13.875544 4899 generic.go:334] "Generic (PLEG): container finished" podID="1b4ce89b-13a8-4114-9078-74feb9b1e145" containerID="71a88c6515db4bc0c3eb814df9d1fb4e276313a98381347046b329f9fa0723ea" exitCode=0 Oct 03 09:26:13 crc kubenswrapper[4899]: I1003 09:26:13.875594 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9tfwd" event={"ID":"1b4ce89b-13a8-4114-9078-74feb9b1e145","Type":"ContainerDied","Data":"71a88c6515db4bc0c3eb814df9d1fb4e276313a98381347046b329f9fa0723ea"} Oct 03 09:26:14 crc kubenswrapper[4899]: I1003 09:26:14.887545 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9tfwd" event={"ID":"1b4ce89b-13a8-4114-9078-74feb9b1e145","Type":"ContainerStarted","Data":"a595121b2758ee55b5731a2ca473dd41f74b43b99939e856598107629e329545"} Oct 03 09:26:14 crc kubenswrapper[4899]: I1003 09:26:14.927613 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9tfwd" podStartSLOduration=3.307003227 podStartE2EDuration="6.927591015s" podCreationTimestamp="2025-10-03 09:26:08 +0000 UTC" firstStartedPulling="2025-10-03 09:26:10.848506034 +0000 UTC m=+2744.955990987" lastFinishedPulling="2025-10-03 09:26:14.469093822 +0000 UTC m=+2748.576578775" observedRunningTime="2025-10-03 09:26:14.903927077 +0000 UTC m=+2749.011412040" watchObservedRunningTime="2025-10-03 09:26:14.927591015 +0000 UTC m=+2749.035075968" Oct 03 09:26:19 crc kubenswrapper[4899]: I1003 09:26:19.273135 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9tfwd" 
Oct 03 09:26:19 crc kubenswrapper[4899]: I1003 09:26:19.274556 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9tfwd" Oct 03 09:26:19 crc kubenswrapper[4899]: I1003 09:26:19.323258 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9tfwd" Oct 03 09:26:19 crc kubenswrapper[4899]: I1003 09:26:19.976567 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9tfwd" Oct 03 09:26:20 crc kubenswrapper[4899]: I1003 09:26:20.560466 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9tfwd"] Oct 03 09:26:21 crc kubenswrapper[4899]: I1003 09:26:21.944180 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9tfwd" podUID="1b4ce89b-13a8-4114-9078-74feb9b1e145" containerName="registry-server" containerID="cri-o://a595121b2758ee55b5731a2ca473dd41f74b43b99939e856598107629e329545" gracePeriod=2 Oct 03 09:26:22 crc kubenswrapper[4899]: I1003 09:26:22.397090 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9tfwd" Oct 03 09:26:22 crc kubenswrapper[4899]: I1003 09:26:22.533066 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ln4sz\" (UniqueName: \"kubernetes.io/projected/1b4ce89b-13a8-4114-9078-74feb9b1e145-kube-api-access-ln4sz\") pod \"1b4ce89b-13a8-4114-9078-74feb9b1e145\" (UID: \"1b4ce89b-13a8-4114-9078-74feb9b1e145\") " Oct 03 09:26:22 crc kubenswrapper[4899]: I1003 09:26:22.533150 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b4ce89b-13a8-4114-9078-74feb9b1e145-catalog-content\") pod \"1b4ce89b-13a8-4114-9078-74feb9b1e145\" (UID: \"1b4ce89b-13a8-4114-9078-74feb9b1e145\") " Oct 03 09:26:22 crc kubenswrapper[4899]: I1003 09:26:22.533192 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b4ce89b-13a8-4114-9078-74feb9b1e145-utilities\") pod \"1b4ce89b-13a8-4114-9078-74feb9b1e145\" (UID: \"1b4ce89b-13a8-4114-9078-74feb9b1e145\") " Oct 03 09:26:22 crc kubenswrapper[4899]: I1003 09:26:22.534336 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b4ce89b-13a8-4114-9078-74feb9b1e145-utilities" (OuterVolumeSpecName: "utilities") pod "1b4ce89b-13a8-4114-9078-74feb9b1e145" (UID: "1b4ce89b-13a8-4114-9078-74feb9b1e145"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:26:22 crc kubenswrapper[4899]: I1003 09:26:22.540420 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b4ce89b-13a8-4114-9078-74feb9b1e145-kube-api-access-ln4sz" (OuterVolumeSpecName: "kube-api-access-ln4sz") pod "1b4ce89b-13a8-4114-9078-74feb9b1e145" (UID: "1b4ce89b-13a8-4114-9078-74feb9b1e145"). InnerVolumeSpecName "kube-api-access-ln4sz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:26:22 crc kubenswrapper[4899]: I1003 09:26:22.623124 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b4ce89b-13a8-4114-9078-74feb9b1e145-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1b4ce89b-13a8-4114-9078-74feb9b1e145" (UID: "1b4ce89b-13a8-4114-9078-74feb9b1e145"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:26:22 crc kubenswrapper[4899]: I1003 09:26:22.635457 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ln4sz\" (UniqueName: \"kubernetes.io/projected/1b4ce89b-13a8-4114-9078-74feb9b1e145-kube-api-access-ln4sz\") on node \"crc\" DevicePath \"\"" Oct 03 09:26:22 crc kubenswrapper[4899]: I1003 09:26:22.635650 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b4ce89b-13a8-4114-9078-74feb9b1e145-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 09:26:22 crc kubenswrapper[4899]: I1003 09:26:22.635740 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b4ce89b-13a8-4114-9078-74feb9b1e145-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 09:26:22 crc kubenswrapper[4899]: I1003 09:26:22.954600 4899 generic.go:334] "Generic (PLEG): container finished" podID="1b4ce89b-13a8-4114-9078-74feb9b1e145" containerID="a595121b2758ee55b5731a2ca473dd41f74b43b99939e856598107629e329545" exitCode=0 Oct 03 09:26:22 crc kubenswrapper[4899]: I1003 09:26:22.954648 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9tfwd" event={"ID":"1b4ce89b-13a8-4114-9078-74feb9b1e145","Type":"ContainerDied","Data":"a595121b2758ee55b5731a2ca473dd41f74b43b99939e856598107629e329545"} Oct 03 09:26:22 crc kubenswrapper[4899]: I1003 09:26:22.954675 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9tfwd" event={"ID":"1b4ce89b-13a8-4114-9078-74feb9b1e145","Type":"ContainerDied","Data":"f1992a03c2101a1707d25a66287e3676319596b845d708d9df0761ea7235d4af"} Oct 03 09:26:22 crc kubenswrapper[4899]: I1003 09:26:22.954692 4899 scope.go:117] "RemoveContainer" containerID="a595121b2758ee55b5731a2ca473dd41f74b43b99939e856598107629e329545" Oct 03 09:26:22 crc kubenswrapper[4899]: I1003 09:26:22.954828 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9tfwd" Oct 03 09:26:22 crc kubenswrapper[4899]: I1003 09:26:22.977634 4899 scope.go:117] "RemoveContainer" containerID="71a88c6515db4bc0c3eb814df9d1fb4e276313a98381347046b329f9fa0723ea" Oct 03 09:26:22 crc kubenswrapper[4899]: I1003 09:26:22.994055 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9tfwd"] Oct 03 09:26:23 crc kubenswrapper[4899]: I1003 09:26:23.000854 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9tfwd"] Oct 03 09:26:23 crc kubenswrapper[4899]: I1003 09:26:23.013559 4899 scope.go:117] "RemoveContainer" containerID="cbc79144269c36754e071d43a63858cd7a94315c8a45fe850410057964a7251e" Oct 03 09:26:23 crc kubenswrapper[4899]: I1003 09:26:23.051952 4899 scope.go:117] "RemoveContainer" containerID="a595121b2758ee55b5731a2ca473dd41f74b43b99939e856598107629e329545" Oct 03 09:26:23 crc kubenswrapper[4899]: E1003 09:26:23.053116 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a595121b2758ee55b5731a2ca473dd41f74b43b99939e856598107629e329545\": container with ID starting with a595121b2758ee55b5731a2ca473dd41f74b43b99939e856598107629e329545 not found: ID does not exist" containerID="a595121b2758ee55b5731a2ca473dd41f74b43b99939e856598107629e329545" Oct 03 09:26:23 crc kubenswrapper[4899]: I1003 09:26:23.053179 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a595121b2758ee55b5731a2ca473dd41f74b43b99939e856598107629e329545"} err="failed to get container status \"a595121b2758ee55b5731a2ca473dd41f74b43b99939e856598107629e329545\": rpc error: code = NotFound desc = could not find container \"a595121b2758ee55b5731a2ca473dd41f74b43b99939e856598107629e329545\": container with ID starting with a595121b2758ee55b5731a2ca473dd41f74b43b99939e856598107629e329545 not found: ID does not exist" Oct 03 09:26:23 crc kubenswrapper[4899]: I1003 09:26:23.053206 4899 scope.go:117] "RemoveContainer" containerID="71a88c6515db4bc0c3eb814df9d1fb4e276313a98381347046b329f9fa0723ea" Oct 03 09:26:23 crc kubenswrapper[4899]: E1003 09:26:23.053488 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71a88c6515db4bc0c3eb814df9d1fb4e276313a98381347046b329f9fa0723ea\": container with ID starting with 71a88c6515db4bc0c3eb814df9d1fb4e276313a98381347046b329f9fa0723ea not found: ID does not exist" containerID="71a88c6515db4bc0c3eb814df9d1fb4e276313a98381347046b329f9fa0723ea" Oct 03 09:26:23 crc kubenswrapper[4899]: I1003 09:26:23.053520 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71a88c6515db4bc0c3eb814df9d1fb4e276313a98381347046b329f9fa0723ea"} err="failed to get container status \"71a88c6515db4bc0c3eb814df9d1fb4e276313a98381347046b329f9fa0723ea\": rpc error: code = NotFound desc = could not find container \"71a88c6515db4bc0c3eb814df9d1fb4e276313a98381347046b329f9fa0723ea\": container with ID starting with 71a88c6515db4bc0c3eb814df9d1fb4e276313a98381347046b329f9fa0723ea not found: ID does not exist" Oct 03 09:26:23 crc kubenswrapper[4899]: I1003 09:26:23.053542 4899 scope.go:117] "RemoveContainer" containerID="cbc79144269c36754e071d43a63858cd7a94315c8a45fe850410057964a7251e" Oct 03 09:26:23 crc kubenswrapper[4899]: E1003 09:26:23.053932 4899 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"cbc79144269c36754e071d43a63858cd7a94315c8a45fe850410057964a7251e\": container with ID starting with cbc79144269c36754e071d43a63858cd7a94315c8a45fe850410057964a7251e not found: ID does not exist" containerID="cbc79144269c36754e071d43a63858cd7a94315c8a45fe850410057964a7251e" Oct 03 09:26:23 crc kubenswrapper[4899]: I1003 09:26:23.053973 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbc79144269c36754e071d43a63858cd7a94315c8a45fe850410057964a7251e"} err="failed to get container status \"cbc79144269c36754e071d43a63858cd7a94315c8a45fe850410057964a7251e\": rpc error: code = NotFound desc = could not find container \"cbc79144269c36754e071d43a63858cd7a94315c8a45fe850410057964a7251e\": container with ID starting with cbc79144269c36754e071d43a63858cd7a94315c8a45fe850410057964a7251e not found: ID does not exist" Oct 03 09:26:24 crc kubenswrapper[4899]: I1003 09:26:24.537558 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b4ce89b-13a8-4114-9078-74feb9b1e145" path="/var/lib/kubelet/pods/1b4ce89b-13a8-4114-9078-74feb9b1e145/volumes" Oct 03 09:27:42 crc kubenswrapper[4899]: I1003 09:27:42.198128 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 09:27:42 crc kubenswrapper[4899]: I1003 09:27:42.198733 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 09:28:12 crc kubenswrapper[4899]: I1003 09:28:12.198292 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 09:28:12 crc kubenswrapper[4899]: I1003 09:28:12.198882 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 09:28:42 crc kubenswrapper[4899]: I1003 09:28:42.198413 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 09:28:42 crc kubenswrapper[4899]: I1003 09:28:42.198957 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 09:28:42 crc kubenswrapper[4899]: I1003 09:28:42.199002 4899 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 09:28:42 crc kubenswrapper[4899]: I1003 09:28:42.199787 4899 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193"} pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 09:28:42 crc kubenswrapper[4899]: I1003 09:28:42.199841 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" containerID="cri-o://f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193" gracePeriod=600 Oct 03 09:28:42 crc kubenswrapper[4899]: E1003 09:28:42.319173 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:28:43 crc kubenswrapper[4899]: I1003 09:28:43.150262 4899 generic.go:334] "Generic (PLEG): container finished" podID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerID="f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193" exitCode=0 Oct 03 09:28:43 crc kubenswrapper[4899]: I1003 09:28:43.150306 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerDied","Data":"f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193"} Oct 03 09:28:43 crc kubenswrapper[4899]: I1003 09:28:43.150361 4899 scope.go:117] "RemoveContainer" containerID="d5dd56986e2dd2f071f94a8855f6a4fbd34efa104bfcd4d459ad9f6f9563b980" Oct 03 09:28:43 crc kubenswrapper[4899]: I1003 09:28:43.151134 4899 scope.go:117] "RemoveContainer" containerID="f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193" Oct 03 09:28:43 crc kubenswrapper[4899]: E1003 09:28:43.151581 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:28:56 crc kubenswrapper[4899]: I1003 09:28:56.533812 4899 scope.go:117] "RemoveContainer" containerID="f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193" Oct 03 09:28:56 crc kubenswrapper[4899]: E1003 09:28:56.534551 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:29:07 crc 
kubenswrapper[4899]: I1003 09:29:07.527521 4899 scope.go:117] "RemoveContainer" containerID="f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193" Oct 03 09:29:07 crc kubenswrapper[4899]: E1003 09:29:07.528269 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:29:19 crc kubenswrapper[4899]: I1003 09:29:19.527883 4899 scope.go:117] "RemoveContainer" containerID="f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193" Oct 03 09:29:19 crc kubenswrapper[4899]: E1003 09:29:19.529473 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:29:30 crc kubenswrapper[4899]: I1003 09:29:30.527223 4899 scope.go:117] "RemoveContainer" containerID="f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193" Oct 03 09:29:30 crc kubenswrapper[4899]: E1003 09:29:30.528079 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:29:45 crc kubenswrapper[4899]: I1003 09:29:45.528050 4899 scope.go:117] "RemoveContainer" containerID="f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193" Oct 03 09:29:45 crc kubenswrapper[4899]: E1003 09:29:45.528695 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:29:56 crc kubenswrapper[4899]: I1003 09:29:56.533001 4899 scope.go:117] "RemoveContainer" containerID="f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193" Oct 03 09:29:56 crc kubenswrapper[4899]: E1003 09:29:56.533811 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:30:00 crc kubenswrapper[4899]: I1003 09:30:00.178120 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324730-bmmmx"] Oct 03 09:30:00 crc 
kubenswrapper[4899]: E1003 09:30:00.179254 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b4ce89b-13a8-4114-9078-74feb9b1e145" containerName="extract-content" Oct 03 09:30:00 crc kubenswrapper[4899]: I1003 09:30:00.179274 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b4ce89b-13a8-4114-9078-74feb9b1e145" containerName="extract-content" Oct 03 09:30:00 crc kubenswrapper[4899]: E1003 09:30:00.179291 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b4ce89b-13a8-4114-9078-74feb9b1e145" containerName="registry-server" Oct 03 09:30:00 crc kubenswrapper[4899]: I1003 09:30:00.179298 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b4ce89b-13a8-4114-9078-74feb9b1e145" containerName="registry-server" Oct 03 09:30:00 crc kubenswrapper[4899]: E1003 09:30:00.179323 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b4ce89b-13a8-4114-9078-74feb9b1e145" containerName="extract-utilities" Oct 03 09:30:00 crc kubenswrapper[4899]: I1003 09:30:00.179353 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b4ce89b-13a8-4114-9078-74feb9b1e145" containerName="extract-utilities" Oct 03 09:30:00 crc kubenswrapper[4899]: I1003 09:30:00.179581 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b4ce89b-13a8-4114-9078-74feb9b1e145" containerName="registry-server" Oct 03 09:30:00 crc kubenswrapper[4899]: I1003 09:30:00.180267 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324730-bmmmx" Oct 03 09:30:00 crc kubenswrapper[4899]: I1003 09:30:00.183233 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 03 09:30:00 crc kubenswrapper[4899]: I1003 09:30:00.185529 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 03 09:30:00 crc kubenswrapper[4899]: I1003 09:30:00.190373 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324730-bmmmx"] Oct 03 09:30:00 crc kubenswrapper[4899]: I1003 09:30:00.325117 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/69234118-ba31-4924-869f-59b6ff05f4a0-secret-volume\") pod \"collect-profiles-29324730-bmmmx\" (UID: \"69234118-ba31-4924-869f-59b6ff05f4a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324730-bmmmx" Oct 03 09:30:00 crc kubenswrapper[4899]: I1003 09:30:00.325469 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tchfr\" (UniqueName: \"kubernetes.io/projected/69234118-ba31-4924-869f-59b6ff05f4a0-kube-api-access-tchfr\") pod \"collect-profiles-29324730-bmmmx\" (UID: \"69234118-ba31-4924-869f-59b6ff05f4a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324730-bmmmx" Oct 03 09:30:00 crc kubenswrapper[4899]: I1003 09:30:00.325571 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/69234118-ba31-4924-869f-59b6ff05f4a0-config-volume\") pod \"collect-profiles-29324730-bmmmx\" (UID: \"69234118-ba31-4924-869f-59b6ff05f4a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324730-bmmmx" Oct 03 09:30:00 crc 
kubenswrapper[4899]: I1003 09:30:00.427326 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/69234118-ba31-4924-869f-59b6ff05f4a0-secret-volume\") pod \"collect-profiles-29324730-bmmmx\" (UID: \"69234118-ba31-4924-869f-59b6ff05f4a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324730-bmmmx" Oct 03 09:30:00 crc kubenswrapper[4899]: I1003 09:30:00.427462 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tchfr\" (UniqueName: \"kubernetes.io/projected/69234118-ba31-4924-869f-59b6ff05f4a0-kube-api-access-tchfr\") pod \"collect-profiles-29324730-bmmmx\" (UID: \"69234118-ba31-4924-869f-59b6ff05f4a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324730-bmmmx" Oct 03 09:30:00 crc kubenswrapper[4899]: I1003 09:30:00.427501 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/69234118-ba31-4924-869f-59b6ff05f4a0-config-volume\") pod \"collect-profiles-29324730-bmmmx\" (UID: \"69234118-ba31-4924-869f-59b6ff05f4a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324730-bmmmx" Oct 03 09:30:00 crc kubenswrapper[4899]: I1003 09:30:00.428878 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/69234118-ba31-4924-869f-59b6ff05f4a0-config-volume\") pod \"collect-profiles-29324730-bmmmx\" (UID: \"69234118-ba31-4924-869f-59b6ff05f4a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324730-bmmmx" Oct 03 09:30:00 crc kubenswrapper[4899]: I1003 09:30:00.434312 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/69234118-ba31-4924-869f-59b6ff05f4a0-secret-volume\") pod \"collect-profiles-29324730-bmmmx\" (UID: \"69234118-ba31-4924-869f-59b6ff05f4a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324730-bmmmx" Oct 03 09:30:00 crc kubenswrapper[4899]: I1003 09:30:00.448648 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tchfr\" (UniqueName: \"kubernetes.io/projected/69234118-ba31-4924-869f-59b6ff05f4a0-kube-api-access-tchfr\") pod \"collect-profiles-29324730-bmmmx\" (UID: \"69234118-ba31-4924-869f-59b6ff05f4a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324730-bmmmx" Oct 03 09:30:00 crc kubenswrapper[4899]: I1003 09:30:00.518450 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324730-bmmmx" Oct 03 09:30:00 crc kubenswrapper[4899]: I1003 09:30:00.961296 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324730-bmmmx"] Oct 03 09:30:01 crc kubenswrapper[4899]: I1003 09:30:01.855947 4899 generic.go:334] "Generic (PLEG): container finished" podID="69234118-ba31-4924-869f-59b6ff05f4a0" containerID="7cfbb33ba096e7fc5d5a1e9b1fd5ab1f3278831d66e1e56278b59aa552096ed9" exitCode=0 Oct 03 09:30:01 crc kubenswrapper[4899]: I1003 09:30:01.856058 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324730-bmmmx" event={"ID":"69234118-ba31-4924-869f-59b6ff05f4a0","Type":"ContainerDied","Data":"7cfbb33ba096e7fc5d5a1e9b1fd5ab1f3278831d66e1e56278b59aa552096ed9"} Oct 03 09:30:01 crc kubenswrapper[4899]: I1003 09:30:01.856353 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324730-bmmmx" event={"ID":"69234118-ba31-4924-869f-59b6ff05f4a0","Type":"ContainerStarted","Data":"b83df7a8b2f3fc2d1435018a6e2db9829927ecbcda6d37f3602d05e546210583"} Oct 03 09:30:03 crc kubenswrapper[4899]: I1003 09:30:03.237161 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324730-bmmmx" Oct 03 09:30:03 crc kubenswrapper[4899]: I1003 09:30:03.390011 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tchfr\" (UniqueName: \"kubernetes.io/projected/69234118-ba31-4924-869f-59b6ff05f4a0-kube-api-access-tchfr\") pod \"69234118-ba31-4924-869f-59b6ff05f4a0\" (UID: \"69234118-ba31-4924-869f-59b6ff05f4a0\") " Oct 03 09:30:03 crc kubenswrapper[4899]: I1003 09:30:03.390289 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/69234118-ba31-4924-869f-59b6ff05f4a0-secret-volume\") pod \"69234118-ba31-4924-869f-59b6ff05f4a0\" (UID: \"69234118-ba31-4924-869f-59b6ff05f4a0\") " Oct 03 09:30:03 crc kubenswrapper[4899]: I1003 09:30:03.390339 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/69234118-ba31-4924-869f-59b6ff05f4a0-config-volume\") pod \"69234118-ba31-4924-869f-59b6ff05f4a0\" (UID: \"69234118-ba31-4924-869f-59b6ff05f4a0\") " Oct 03 09:30:03 crc kubenswrapper[4899]: I1003 09:30:03.391306 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69234118-ba31-4924-869f-59b6ff05f4a0-config-volume" (OuterVolumeSpecName: "config-volume") pod "69234118-ba31-4924-869f-59b6ff05f4a0" (UID: "69234118-ba31-4924-869f-59b6ff05f4a0"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 09:30:03 crc kubenswrapper[4899]: I1003 09:30:03.396425 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69234118-ba31-4924-869f-59b6ff05f4a0-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "69234118-ba31-4924-869f-59b6ff05f4a0" (UID: "69234118-ba31-4924-869f-59b6ff05f4a0"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:30:03 crc kubenswrapper[4899]: I1003 09:30:03.396789 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69234118-ba31-4924-869f-59b6ff05f4a0-kube-api-access-tchfr" (OuterVolumeSpecName: "kube-api-access-tchfr") pod "69234118-ba31-4924-869f-59b6ff05f4a0" (UID: "69234118-ba31-4924-869f-59b6ff05f4a0"). InnerVolumeSpecName "kube-api-access-tchfr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:30:03 crc kubenswrapper[4899]: I1003 09:30:03.492543 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tchfr\" (UniqueName: \"kubernetes.io/projected/69234118-ba31-4924-869f-59b6ff05f4a0-kube-api-access-tchfr\") on node \"crc\" DevicePath \"\"" Oct 03 09:30:03 crc kubenswrapper[4899]: I1003 09:30:03.492589 4899 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/69234118-ba31-4924-869f-59b6ff05f4a0-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 03 09:30:03 crc kubenswrapper[4899]: I1003 09:30:03.492607 4899 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/69234118-ba31-4924-869f-59b6ff05f4a0-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 09:30:03 crc kubenswrapper[4899]: I1003 09:30:03.872434 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324730-bmmmx" event={"ID":"69234118-ba31-4924-869f-59b6ff05f4a0","Type":"ContainerDied","Data":"b83df7a8b2f3fc2d1435018a6e2db9829927ecbcda6d37f3602d05e546210583"} Oct 03 09:30:03 crc kubenswrapper[4899]: I1003 09:30:03.873064 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b83df7a8b2f3fc2d1435018a6e2db9829927ecbcda6d37f3602d05e546210583" Oct 03 09:30:03 crc kubenswrapper[4899]: I1003 09:30:03.872473 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324730-bmmmx" Oct 03 09:30:04 crc kubenswrapper[4899]: I1003 09:30:04.311874 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324685-dcr2l"] Oct 03 09:30:04 crc kubenswrapper[4899]: I1003 09:30:04.319357 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324685-dcr2l"] Oct 03 09:30:04 crc kubenswrapper[4899]: I1003 09:30:04.537596 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1514d516-ecf8-473e-814a-5b675c7b23bc" path="/var/lib/kubelet/pods/1514d516-ecf8-473e-814a-5b675c7b23bc/volumes" Oct 03 09:30:11 crc kubenswrapper[4899]: I1003 09:30:11.527287 4899 scope.go:117] "RemoveContainer" containerID="f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193" Oct 03 09:30:11 crc kubenswrapper[4899]: E1003 09:30:11.528185 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:30:22 crc kubenswrapper[4899]: I1003 09:30:22.528305 4899 scope.go:117] "RemoveContainer" containerID="f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193" Oct 03 09:30:22 crc kubenswrapper[4899]: E1003 09:30:22.530024 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:30:35 crc kubenswrapper[4899]: I1003 09:30:35.527285 4899 scope.go:117] "RemoveContainer" containerID="f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193" Oct 03 09:30:35 crc kubenswrapper[4899]: E1003 09:30:35.528087 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:30:49 crc kubenswrapper[4899]: I1003 09:30:49.527742 4899 scope.go:117] "RemoveContainer" containerID="f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193" Oct 03 09:30:49 crc kubenswrapper[4899]: E1003 09:30:49.528644 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:30:50 crc kubenswrapper[4899]: I1003 09:30:50.320832 4899 scope.go:117] "RemoveContainer" 
containerID="6c5ff0f918cb3b89ddbfb13c42e83931df59f27fa30a62f18e1e97e84b539a09" Oct 03 09:31:03 crc kubenswrapper[4899]: I1003 09:31:03.526783 4899 scope.go:117] "RemoveContainer" containerID="f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193" Oct 03 09:31:03 crc kubenswrapper[4899]: E1003 09:31:03.528545 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:31:12 crc kubenswrapper[4899]: I1003 09:31:12.721705 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8nrws"] Oct 03 09:31:12 crc kubenswrapper[4899]: E1003 09:31:12.723067 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69234118-ba31-4924-869f-59b6ff05f4a0" containerName="collect-profiles" Oct 03 09:31:12 crc kubenswrapper[4899]: I1003 09:31:12.723084 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="69234118-ba31-4924-869f-59b6ff05f4a0" containerName="collect-profiles" Oct 03 09:31:12 crc kubenswrapper[4899]: I1003 09:31:12.723322 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="69234118-ba31-4924-869f-59b6ff05f4a0" containerName="collect-profiles" Oct 03 09:31:12 crc kubenswrapper[4899]: I1003 09:31:12.746632 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8nrws" Oct 03 09:31:12 crc kubenswrapper[4899]: I1003 09:31:12.780092 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8nrws"] Oct 03 09:31:12 crc kubenswrapper[4899]: I1003 09:31:12.863292 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4d045bd-54d2-4690-b0fa-a94eb6cc098f-catalog-content\") pod \"redhat-marketplace-8nrws\" (UID: \"e4d045bd-54d2-4690-b0fa-a94eb6cc098f\") " pod="openshift-marketplace/redhat-marketplace-8nrws" Oct 03 09:31:12 crc kubenswrapper[4899]: I1003 09:31:12.863732 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4d045bd-54d2-4690-b0fa-a94eb6cc098f-utilities\") pod \"redhat-marketplace-8nrws\" (UID: \"e4d045bd-54d2-4690-b0fa-a94eb6cc098f\") " pod="openshift-marketplace/redhat-marketplace-8nrws" Oct 03 09:31:12 crc kubenswrapper[4899]: I1003 09:31:12.863773 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmwx9\" (UniqueName: \"kubernetes.io/projected/e4d045bd-54d2-4690-b0fa-a94eb6cc098f-kube-api-access-wmwx9\") pod \"redhat-marketplace-8nrws\" (UID: \"e4d045bd-54d2-4690-b0fa-a94eb6cc098f\") " pod="openshift-marketplace/redhat-marketplace-8nrws" Oct 03 09:31:12 crc kubenswrapper[4899]: I1003 09:31:12.965826 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4d045bd-54d2-4690-b0fa-a94eb6cc098f-catalog-content\") pod \"redhat-marketplace-8nrws\" (UID: \"e4d045bd-54d2-4690-b0fa-a94eb6cc098f\") " pod="openshift-marketplace/redhat-marketplace-8nrws" Oct 03 09:31:12 crc 
kubenswrapper[4899]: I1003 09:31:12.965955 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4d045bd-54d2-4690-b0fa-a94eb6cc098f-utilities\") pod \"redhat-marketplace-8nrws\" (UID: \"e4d045bd-54d2-4690-b0fa-a94eb6cc098f\") " pod="openshift-marketplace/redhat-marketplace-8nrws" Oct 03 09:31:12 crc kubenswrapper[4899]: I1003 09:31:12.965981 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmwx9\" (UniqueName: \"kubernetes.io/projected/e4d045bd-54d2-4690-b0fa-a94eb6cc098f-kube-api-access-wmwx9\") pod \"redhat-marketplace-8nrws\" (UID: \"e4d045bd-54d2-4690-b0fa-a94eb6cc098f\") " pod="openshift-marketplace/redhat-marketplace-8nrws" Oct 03 09:31:12 crc kubenswrapper[4899]: I1003 09:31:12.966459 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4d045bd-54d2-4690-b0fa-a94eb6cc098f-catalog-content\") pod \"redhat-marketplace-8nrws\" (UID: \"e4d045bd-54d2-4690-b0fa-a94eb6cc098f\") " pod="openshift-marketplace/redhat-marketplace-8nrws" Oct 03 09:31:12 crc kubenswrapper[4899]: I1003 09:31:12.966555 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4d045bd-54d2-4690-b0fa-a94eb6cc098f-utilities\") pod \"redhat-marketplace-8nrws\" (UID: \"e4d045bd-54d2-4690-b0fa-a94eb6cc098f\") " pod="openshift-marketplace/redhat-marketplace-8nrws" Oct 03 09:31:12 crc kubenswrapper[4899]: I1003 09:31:12.989773 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmwx9\" (UniqueName: \"kubernetes.io/projected/e4d045bd-54d2-4690-b0fa-a94eb6cc098f-kube-api-access-wmwx9\") pod \"redhat-marketplace-8nrws\" (UID: \"e4d045bd-54d2-4690-b0fa-a94eb6cc098f\") " pod="openshift-marketplace/redhat-marketplace-8nrws" Oct 03 09:31:13 crc kubenswrapper[4899]: I1003 09:31:13.089275 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8nrws" Oct 03 09:31:13 crc kubenswrapper[4899]: I1003 09:31:13.537320 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8nrws"] Oct 03 09:31:14 crc kubenswrapper[4899]: I1003 09:31:14.501950 4899 generic.go:334] "Generic (PLEG): container finished" podID="e4d045bd-54d2-4690-b0fa-a94eb6cc098f" containerID="a3a247b00a8de250468afbc0e8f7e7915e0750e56026575331939c68116e7b92" exitCode=0 Oct 03 09:31:14 crc kubenswrapper[4899]: I1003 09:31:14.502054 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8nrws" event={"ID":"e4d045bd-54d2-4690-b0fa-a94eb6cc098f","Type":"ContainerDied","Data":"a3a247b00a8de250468afbc0e8f7e7915e0750e56026575331939c68116e7b92"} Oct 03 09:31:14 crc kubenswrapper[4899]: I1003 09:31:14.502305 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8nrws" event={"ID":"e4d045bd-54d2-4690-b0fa-a94eb6cc098f","Type":"ContainerStarted","Data":"2338ff558522ef5ae7b98b0b48c1e82901ada956b5b3bde6a6beae3a067afdb7"} Oct 03 09:31:14 crc kubenswrapper[4899]: I1003 09:31:14.504170 4899 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 09:31:15 crc kubenswrapper[4899]: I1003 09:31:15.527506 4899 scope.go:117] "RemoveContainer" containerID="f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193" Oct 03 09:31:15 crc kubenswrapper[4899]: E1003 09:31:15.528104 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:31:16 crc kubenswrapper[4899]: I1003 09:31:16.525243 4899 generic.go:334] "Generic (PLEG): container finished" podID="e4d045bd-54d2-4690-b0fa-a94eb6cc098f" containerID="fe4a6ffb35fb4e137351efb5c206f6aec97ea3687e5a8dede2b87d167c9e9354" exitCode=0 Oct 03 09:31:16 crc kubenswrapper[4899]: I1003 09:31:16.525459 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8nrws" event={"ID":"e4d045bd-54d2-4690-b0fa-a94eb6cc098f","Type":"ContainerDied","Data":"fe4a6ffb35fb4e137351efb5c206f6aec97ea3687e5a8dede2b87d167c9e9354"} Oct 03 09:31:17 crc kubenswrapper[4899]: I1003 09:31:17.536603 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8nrws" event={"ID":"e4d045bd-54d2-4690-b0fa-a94eb6cc098f","Type":"ContainerStarted","Data":"f235c5a147b456a8647303847cdeab97870cd534b29bd2328e74117f68a1aaf9"} Oct 03 09:31:17 crc kubenswrapper[4899]: I1003 09:31:17.563118 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8nrws" podStartSLOduration=3.079618599 podStartE2EDuration="5.563096089s" podCreationTimestamp="2025-10-03 09:31:12 +0000 UTC" firstStartedPulling="2025-10-03 09:31:14.503903204 +0000 UTC m=+3048.611388157" lastFinishedPulling="2025-10-03 09:31:16.987380694 +0000 UTC m=+3051.094865647" observedRunningTime="2025-10-03 09:31:17.553674552 +0000 UTC m=+3051.661159515" watchObservedRunningTime="2025-10-03 09:31:17.563096089 +0000 UTC m=+3051.670581042" Oct 03 09:31:23 crc 
kubenswrapper[4899]: I1003 09:31:23.089837 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8nrws" Oct 03 09:31:23 crc kubenswrapper[4899]: I1003 09:31:23.090325 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8nrws" Oct 03 09:31:23 crc kubenswrapper[4899]: I1003 09:31:23.141799 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8nrws" Oct 03 09:31:23 crc kubenswrapper[4899]: I1003 09:31:23.635217 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8nrws" Oct 03 09:31:23 crc kubenswrapper[4899]: I1003 09:31:23.681490 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8nrws"] Oct 03 09:31:25 crc kubenswrapper[4899]: I1003 09:31:25.604646 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8nrws" podUID="e4d045bd-54d2-4690-b0fa-a94eb6cc098f" containerName="registry-server" containerID="cri-o://f235c5a147b456a8647303847cdeab97870cd534b29bd2328e74117f68a1aaf9" gracePeriod=2 Oct 03 09:31:26 crc kubenswrapper[4899]: I1003 09:31:26.124551 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8nrws" Oct 03 09:31:26 crc kubenswrapper[4899]: I1003 09:31:26.231431 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4d045bd-54d2-4690-b0fa-a94eb6cc098f-catalog-content\") pod \"e4d045bd-54d2-4690-b0fa-a94eb6cc098f\" (UID: \"e4d045bd-54d2-4690-b0fa-a94eb6cc098f\") " Oct 03 09:31:26 crc kubenswrapper[4899]: I1003 09:31:26.231524 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4d045bd-54d2-4690-b0fa-a94eb6cc098f-utilities\") pod \"e4d045bd-54d2-4690-b0fa-a94eb6cc098f\" (UID: \"e4d045bd-54d2-4690-b0fa-a94eb6cc098f\") " Oct 03 09:31:26 crc kubenswrapper[4899]: I1003 09:31:26.231908 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wmwx9\" (UniqueName: \"kubernetes.io/projected/e4d045bd-54d2-4690-b0fa-a94eb6cc098f-kube-api-access-wmwx9\") pod \"e4d045bd-54d2-4690-b0fa-a94eb6cc098f\" (UID: \"e4d045bd-54d2-4690-b0fa-a94eb6cc098f\") " Oct 03 09:31:26 crc kubenswrapper[4899]: I1003 09:31:26.232529 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4d045bd-54d2-4690-b0fa-a94eb6cc098f-utilities" (OuterVolumeSpecName: "utilities") pod "e4d045bd-54d2-4690-b0fa-a94eb6cc098f" (UID: "e4d045bd-54d2-4690-b0fa-a94eb6cc098f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:31:26 crc kubenswrapper[4899]: I1003 09:31:26.232997 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4d045bd-54d2-4690-b0fa-a94eb6cc098f-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 09:31:26 crc kubenswrapper[4899]: I1003 09:31:26.238456 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4d045bd-54d2-4690-b0fa-a94eb6cc098f-kube-api-access-wmwx9" (OuterVolumeSpecName: "kube-api-access-wmwx9") pod "e4d045bd-54d2-4690-b0fa-a94eb6cc098f" (UID: "e4d045bd-54d2-4690-b0fa-a94eb6cc098f"). InnerVolumeSpecName "kube-api-access-wmwx9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:31:26 crc kubenswrapper[4899]: I1003 09:31:26.245071 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4d045bd-54d2-4690-b0fa-a94eb6cc098f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e4d045bd-54d2-4690-b0fa-a94eb6cc098f" (UID: "e4d045bd-54d2-4690-b0fa-a94eb6cc098f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:31:26 crc kubenswrapper[4899]: I1003 09:31:26.335958 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4d045bd-54d2-4690-b0fa-a94eb6cc098f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 09:31:26 crc kubenswrapper[4899]: I1003 09:31:26.336021 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wmwx9\" (UniqueName: \"kubernetes.io/projected/e4d045bd-54d2-4690-b0fa-a94eb6cc098f-kube-api-access-wmwx9\") on node \"crc\" DevicePath \"\"" Oct 03 09:31:26 crc kubenswrapper[4899]: I1003 09:31:26.615424 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8nrws" Oct 03 09:31:26 crc kubenswrapper[4899]: I1003 09:31:26.615443 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8nrws" event={"ID":"e4d045bd-54d2-4690-b0fa-a94eb6cc098f","Type":"ContainerDied","Data":"f235c5a147b456a8647303847cdeab97870cd534b29bd2328e74117f68a1aaf9"} Oct 03 09:31:26 crc kubenswrapper[4899]: I1003 09:31:26.615506 4899 scope.go:117] "RemoveContainer" containerID="f235c5a147b456a8647303847cdeab97870cd534b29bd2328e74117f68a1aaf9" Oct 03 09:31:26 crc kubenswrapper[4899]: I1003 09:31:26.616668 4899 generic.go:334] "Generic (PLEG): container finished" podID="e4d045bd-54d2-4690-b0fa-a94eb6cc098f" containerID="f235c5a147b456a8647303847cdeab97870cd534b29bd2328e74117f68a1aaf9" exitCode=0 Oct 03 09:31:26 crc kubenswrapper[4899]: I1003 09:31:26.616732 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8nrws" event={"ID":"e4d045bd-54d2-4690-b0fa-a94eb6cc098f","Type":"ContainerDied","Data":"2338ff558522ef5ae7b98b0b48c1e82901ada956b5b3bde6a6beae3a067afdb7"} Oct 03 09:31:26 crc kubenswrapper[4899]: I1003 09:31:26.640613 4899 scope.go:117] "RemoveContainer" containerID="fe4a6ffb35fb4e137351efb5c206f6aec97ea3687e5a8dede2b87d167c9e9354" Oct 03 09:31:26 crc kubenswrapper[4899]: I1003 09:31:26.648042 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8nrws"] Oct 03 09:31:26 crc kubenswrapper[4899]: I1003 09:31:26.668242 4899 scope.go:117] "RemoveContainer" containerID="a3a247b00a8de250468afbc0e8f7e7915e0750e56026575331939c68116e7b92" Oct 03 09:31:26 crc kubenswrapper[4899]: I1003 09:31:26.669970 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8nrws"] Oct 03 09:31:26 crc kubenswrapper[4899]: I1003 09:31:26.719473 4899 scope.go:117] "RemoveContainer" containerID="f235c5a147b456a8647303847cdeab97870cd534b29bd2328e74117f68a1aaf9" Oct 03 09:31:26 crc kubenswrapper[4899]: E1003 09:31:26.720028 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f235c5a147b456a8647303847cdeab97870cd534b29bd2328e74117f68a1aaf9\": container with ID starting with f235c5a147b456a8647303847cdeab97870cd534b29bd2328e74117f68a1aaf9 not found: ID does not exist" containerID="f235c5a147b456a8647303847cdeab97870cd534b29bd2328e74117f68a1aaf9" Oct 03 09:31:26 crc kubenswrapper[4899]: I1003 09:31:26.720077 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f235c5a147b456a8647303847cdeab97870cd534b29bd2328e74117f68a1aaf9"} err="failed to get container status \"f235c5a147b456a8647303847cdeab97870cd534b29bd2328e74117f68a1aaf9\": rpc error: code = NotFound desc = could not find container \"f235c5a147b456a8647303847cdeab97870cd534b29bd2328e74117f68a1aaf9\": container with ID starting with f235c5a147b456a8647303847cdeab97870cd534b29bd2328e74117f68a1aaf9 not found: ID does not exist" Oct 03 09:31:26 crc kubenswrapper[4899]: I1003 09:31:26.720096 4899 scope.go:117] "RemoveContainer" containerID="fe4a6ffb35fb4e137351efb5c206f6aec97ea3687e5a8dede2b87d167c9e9354" Oct 03 09:31:26 crc kubenswrapper[4899]: E1003 09:31:26.720377 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe4a6ffb35fb4e137351efb5c206f6aec97ea3687e5a8dede2b87d167c9e9354\": container with ID 
starting with fe4a6ffb35fb4e137351efb5c206f6aec97ea3687e5a8dede2b87d167c9e9354 not found: ID does not exist" containerID="fe4a6ffb35fb4e137351efb5c206f6aec97ea3687e5a8dede2b87d167c9e9354" Oct 03 09:31:26 crc kubenswrapper[4899]: I1003 09:31:26.720414 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe4a6ffb35fb4e137351efb5c206f6aec97ea3687e5a8dede2b87d167c9e9354"} err="failed to get container status \"fe4a6ffb35fb4e137351efb5c206f6aec97ea3687e5a8dede2b87d167c9e9354\": rpc error: code = NotFound desc = could not find container \"fe4a6ffb35fb4e137351efb5c206f6aec97ea3687e5a8dede2b87d167c9e9354\": container with ID starting with fe4a6ffb35fb4e137351efb5c206f6aec97ea3687e5a8dede2b87d167c9e9354 not found: ID does not exist" Oct 03 09:31:26 crc kubenswrapper[4899]: I1003 09:31:26.720427 4899 scope.go:117] "RemoveContainer" containerID="a3a247b00a8de250468afbc0e8f7e7915e0750e56026575331939c68116e7b92" Oct 03 09:31:26 crc kubenswrapper[4899]: E1003 09:31:26.720675 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3a247b00a8de250468afbc0e8f7e7915e0750e56026575331939c68116e7b92\": container with ID starting with a3a247b00a8de250468afbc0e8f7e7915e0750e56026575331939c68116e7b92 not found: ID does not exist" containerID="a3a247b00a8de250468afbc0e8f7e7915e0750e56026575331939c68116e7b92" Oct 03 09:31:26 crc kubenswrapper[4899]: I1003 09:31:26.720703 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3a247b00a8de250468afbc0e8f7e7915e0750e56026575331939c68116e7b92"} err="failed to get container status \"a3a247b00a8de250468afbc0e8f7e7915e0750e56026575331939c68116e7b92\": rpc error: code = NotFound desc = could not find container \"a3a247b00a8de250468afbc0e8f7e7915e0750e56026575331939c68116e7b92\": container with ID starting with a3a247b00a8de250468afbc0e8f7e7915e0750e56026575331939c68116e7b92 not found: ID does not exist" Oct 03 09:31:27 crc kubenswrapper[4899]: I1003 09:31:27.528453 4899 scope.go:117] "RemoveContainer" containerID="f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193" Oct 03 09:31:27 crc kubenswrapper[4899]: E1003 09:31:27.529153 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:31:28 crc kubenswrapper[4899]: I1003 09:31:28.539328 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4d045bd-54d2-4690-b0fa-a94eb6cc098f" path="/var/lib/kubelet/pods/e4d045bd-54d2-4690-b0fa-a94eb6cc098f/volumes" Oct 03 09:31:34 crc kubenswrapper[4899]: E1003 09:31:34.856496 4899 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode4d045bd_54d2_4690_b0fa_a94eb6cc098f.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode4d045bd_54d2_4690_b0fa_a94eb6cc098f.slice/crio-2338ff558522ef5ae7b98b0b48c1e82901ada956b5b3bde6a6beae3a067afdb7\": RecentStats: unable to find data in memory cache]" Oct 03 09:31:41 crc kubenswrapper[4899]: 
I1003 09:31:41.527500 4899 scope.go:117] "RemoveContainer" containerID="f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193" Oct 03 09:31:41 crc kubenswrapper[4899]: E1003 09:31:41.528452 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:31:45 crc kubenswrapper[4899]: E1003 09:31:45.134577 4899 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode4d045bd_54d2_4690_b0fa_a94eb6cc098f.slice/crio-2338ff558522ef5ae7b98b0b48c1e82901ada956b5b3bde6a6beae3a067afdb7\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode4d045bd_54d2_4690_b0fa_a94eb6cc098f.slice\": RecentStats: unable to find data in memory cache]" Oct 03 09:31:52 crc kubenswrapper[4899]: I1003 09:31:52.526754 4899 scope.go:117] "RemoveContainer" containerID="f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193" Oct 03 09:31:52 crc kubenswrapper[4899]: E1003 09:31:52.527490 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:31:55 crc kubenswrapper[4899]: E1003 09:31:55.384339 4899 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode4d045bd_54d2_4690_b0fa_a94eb6cc098f.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode4d045bd_54d2_4690_b0fa_a94eb6cc098f.slice/crio-2338ff558522ef5ae7b98b0b48c1e82901ada956b5b3bde6a6beae3a067afdb7\": RecentStats: unable to find data in memory cache]" Oct 03 09:32:05 crc kubenswrapper[4899]: I1003 09:32:05.526698 4899 scope.go:117] "RemoveContainer" containerID="f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193" Oct 03 09:32:05 crc kubenswrapper[4899]: E1003 09:32:05.527821 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:32:05 crc kubenswrapper[4899]: E1003 09:32:05.621282 4899 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode4d045bd_54d2_4690_b0fa_a94eb6cc098f.slice/crio-2338ff558522ef5ae7b98b0b48c1e82901ada956b5b3bde6a6beae3a067afdb7\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode4d045bd_54d2_4690_b0fa_a94eb6cc098f.slice\": RecentStats: unable to find data in memory cache]" Oct 03 09:32:15 crc kubenswrapper[4899]: E1003 09:32:15.875139 4899 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode4d045bd_54d2_4690_b0fa_a94eb6cc098f.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode4d045bd_54d2_4690_b0fa_a94eb6cc098f.slice/crio-2338ff558522ef5ae7b98b0b48c1e82901ada956b5b3bde6a6beae3a067afdb7\": RecentStats: unable to find data in memory cache]" Oct 03 09:32:18 crc kubenswrapper[4899]: I1003 09:32:18.527010 4899 scope.go:117] "RemoveContainer" containerID="f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193" Oct 03 09:32:18 crc kubenswrapper[4899]: E1003 09:32:18.527810 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:32:26 crc kubenswrapper[4899]: E1003 09:32:26.116528 4899 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode4d045bd_54d2_4690_b0fa_a94eb6cc098f.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode4d045bd_54d2_4690_b0fa_a94eb6cc098f.slice/crio-2338ff558522ef5ae7b98b0b48c1e82901ada956b5b3bde6a6beae3a067afdb7\": RecentStats: unable to find data in memory cache]" Oct 03 09:32:26 crc kubenswrapper[4899]: E1003 09:32:26.562739 4899 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/d26da8d6d1f2a951bbb5d8b092a0649592925139c78f27a346c94946ca93264b/diff" to get inode usage: stat /var/lib/containers/storage/overlay/d26da8d6d1f2a951bbb5d8b092a0649592925139c78f27a346c94946ca93264b/diff: no such file or directory, extraDiskErr: Oct 03 09:32:32 crc kubenswrapper[4899]: I1003 09:32:32.526642 4899 scope.go:117] "RemoveContainer" containerID="f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193" Oct 03 09:32:32 crc kubenswrapper[4899]: E1003 09:32:32.528818 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:32:47 crc kubenswrapper[4899]: I1003 09:32:47.527614 4899 scope.go:117] "RemoveContainer" containerID="f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193" Oct 03 09:32:47 crc kubenswrapper[4899]: E1003 09:32:47.528586 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:33:01 crc kubenswrapper[4899]: I1003 09:33:01.528085 4899 scope.go:117] "RemoveContainer" containerID="f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193" Oct 03 09:33:01 crc kubenswrapper[4899]: E1003 09:33:01.528845 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:33:13 crc kubenswrapper[4899]: I1003 09:33:13.527568 4899 scope.go:117] "RemoveContainer" containerID="f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193" Oct 03 09:33:13 crc kubenswrapper[4899]: E1003 09:33:13.528471 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:33:24 crc kubenswrapper[4899]: I1003 09:33:24.528343 4899 scope.go:117] "RemoveContainer" containerID="f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193" Oct 03 09:33:24 crc kubenswrapper[4899]: E1003 09:33:24.529100 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:33:35 crc kubenswrapper[4899]: I1003 09:33:35.527537 4899 scope.go:117] "RemoveContainer" containerID="f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193" Oct 03 09:33:35 crc kubenswrapper[4899]: E1003 09:33:35.528423 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:33:47 crc kubenswrapper[4899]: I1003 09:33:47.527508 4899 scope.go:117] "RemoveContainer" containerID="f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193" Oct 03 09:33:47 crc kubenswrapper[4899]: I1003 09:33:47.997317 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerStarted","Data":"512c2485fabb512b76ede2aa8b2cf298ff6f84b744dde89782c0144994984843"} Oct 03 09:35:05 crc kubenswrapper[4899]: I1003 09:35:05.649781 4899 generic.go:334] "Generic (PLEG): container finished" 
podID="39b95e3c-c5c3-44c9-a89f-490bcde4fc69" containerID="beae13d63c6e4db84f8ae9a4303b9a7662fc6d81d777812ba3dcff45e2374eb9" exitCode=0 Oct 03 09:35:05 crc kubenswrapper[4899]: I1003 09:35:05.649861 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"39b95e3c-c5c3-44c9-a89f-490bcde4fc69","Type":"ContainerDied","Data":"beae13d63c6e4db84f8ae9a4303b9a7662fc6d81d777812ba3dcff45e2374eb9"} Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.013273 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.129848 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-openstack-config-secret\") pod \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.129988 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-config-data\") pod \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.130076 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-ca-certs\") pod \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.130128 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.130212 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-test-operator-ephemeral-temporary\") pod \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.130236 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-ssh-key\") pod \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.130269 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j2ktc\" (UniqueName: \"kubernetes.io/projected/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-kube-api-access-j2ktc\") pod \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.130322 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-test-operator-ephemeral-workdir\") pod \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 
09:35:07.130369 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-openstack-config\") pod \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\" (UID: \"39b95e3c-c5c3-44c9-a89f-490bcde4fc69\") " Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.130684 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "39b95e3c-c5c3-44c9-a89f-490bcde4fc69" (UID: "39b95e3c-c5c3-44c9-a89f-490bcde4fc69"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.130784 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-config-data" (OuterVolumeSpecName: "config-data") pod "39b95e3c-c5c3-44c9-a89f-490bcde4fc69" (UID: "39b95e3c-c5c3-44c9-a89f-490bcde4fc69"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.131634 4899 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.131666 4899 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.135060 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "test-operator-logs") pod "39b95e3c-c5c3-44c9-a89f-490bcde4fc69" (UID: "39b95e3c-c5c3-44c9-a89f-490bcde4fc69"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.136370 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "39b95e3c-c5c3-44c9-a89f-490bcde4fc69" (UID: "39b95e3c-c5c3-44c9-a89f-490bcde4fc69"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.136567 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-kube-api-access-j2ktc" (OuterVolumeSpecName: "kube-api-access-j2ktc") pod "39b95e3c-c5c3-44c9-a89f-490bcde4fc69" (UID: "39b95e3c-c5c3-44c9-a89f-490bcde4fc69"). InnerVolumeSpecName "kube-api-access-j2ktc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.156788 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "39b95e3c-c5c3-44c9-a89f-490bcde4fc69" (UID: "39b95e3c-c5c3-44c9-a89f-490bcde4fc69"). 
InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.157363 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "39b95e3c-c5c3-44c9-a89f-490bcde4fc69" (UID: "39b95e3c-c5c3-44c9-a89f-490bcde4fc69"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.162756 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "39b95e3c-c5c3-44c9-a89f-490bcde4fc69" (UID: "39b95e3c-c5c3-44c9-a89f-490bcde4fc69"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.176880 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "39b95e3c-c5c3-44c9-a89f-490bcde4fc69" (UID: "39b95e3c-c5c3-44c9-a89f-490bcde4fc69"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.233744 4899 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.233785 4899 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-ca-certs\") on node \"crc\" DevicePath \"\"" Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.233826 4899 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.233839 4899 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.233849 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j2ktc\" (UniqueName: \"kubernetes.io/projected/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-kube-api-access-j2ktc\") on node \"crc\" DevicePath \"\"" Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.233862 4899 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.233873 4899 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/39b95e3c-c5c3-44c9-a89f-490bcde4fc69-openstack-config\") on node \"crc\" DevicePath \"\"" Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.256053 4899 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 
09:35:07.335432 4899 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.668039 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"39b95e3c-c5c3-44c9-a89f-490bcde4fc69","Type":"ContainerDied","Data":"5f6d8ef96959df8ce55d4e0f6f77a04d3f81255980ebddb28a87d99f9ae6ec56"} Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.668359 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f6d8ef96959df8ce55d4e0f6f77a04d3f81255980ebddb28a87d99f9ae6ec56" Oct 03 09:35:07 crc kubenswrapper[4899]: I1003 09:35:07.668119 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 03 09:35:14 crc kubenswrapper[4899]: I1003 09:35:14.713466 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 03 09:35:14 crc kubenswrapper[4899]: E1003 09:35:14.714638 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39b95e3c-c5c3-44c9-a89f-490bcde4fc69" containerName="tempest-tests-tempest-tests-runner" Oct 03 09:35:14 crc kubenswrapper[4899]: I1003 09:35:14.714656 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="39b95e3c-c5c3-44c9-a89f-490bcde4fc69" containerName="tempest-tests-tempest-tests-runner" Oct 03 09:35:14 crc kubenswrapper[4899]: E1003 09:35:14.714671 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4d045bd-54d2-4690-b0fa-a94eb6cc098f" containerName="extract-utilities" Oct 03 09:35:14 crc kubenswrapper[4899]: I1003 09:35:14.714680 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4d045bd-54d2-4690-b0fa-a94eb6cc098f" containerName="extract-utilities" Oct 03 09:35:14 crc kubenswrapper[4899]: E1003 09:35:14.714703 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4d045bd-54d2-4690-b0fa-a94eb6cc098f" containerName="extract-content" Oct 03 09:35:14 crc kubenswrapper[4899]: I1003 09:35:14.714712 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4d045bd-54d2-4690-b0fa-a94eb6cc098f" containerName="extract-content" Oct 03 09:35:14 crc kubenswrapper[4899]: E1003 09:35:14.714739 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4d045bd-54d2-4690-b0fa-a94eb6cc098f" containerName="registry-server" Oct 03 09:35:14 crc kubenswrapper[4899]: I1003 09:35:14.714748 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4d045bd-54d2-4690-b0fa-a94eb6cc098f" containerName="registry-server" Oct 03 09:35:14 crc kubenswrapper[4899]: I1003 09:35:14.715061 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="39b95e3c-c5c3-44c9-a89f-490bcde4fc69" containerName="tempest-tests-tempest-tests-runner" Oct 03 09:35:14 crc kubenswrapper[4899]: I1003 09:35:14.715083 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4d045bd-54d2-4690-b0fa-a94eb6cc098f" containerName="registry-server" Oct 03 09:35:14 crc kubenswrapper[4899]: I1003 09:35:14.716001 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 03 09:35:14 crc kubenswrapper[4899]: I1003 09:35:14.719177 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-bfrgk" Oct 03 09:35:14 crc kubenswrapper[4899]: I1003 09:35:14.721210 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 03 09:35:14 crc kubenswrapper[4899]: I1003 09:35:14.778356 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"56098c86-7f51-46a3-9838-dfa07d174475\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 03 09:35:14 crc kubenswrapper[4899]: I1003 09:35:14.778442 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7b59\" (UniqueName: \"kubernetes.io/projected/56098c86-7f51-46a3-9838-dfa07d174475-kube-api-access-m7b59\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"56098c86-7f51-46a3-9838-dfa07d174475\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 03 09:35:14 crc kubenswrapper[4899]: I1003 09:35:14.880346 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"56098c86-7f51-46a3-9838-dfa07d174475\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 03 09:35:14 crc kubenswrapper[4899]: I1003 09:35:14.880469 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7b59\" (UniqueName: \"kubernetes.io/projected/56098c86-7f51-46a3-9838-dfa07d174475-kube-api-access-m7b59\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"56098c86-7f51-46a3-9838-dfa07d174475\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 03 09:35:14 crc kubenswrapper[4899]: I1003 09:35:14.880911 4899 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"56098c86-7f51-46a3-9838-dfa07d174475\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 03 09:35:14 crc kubenswrapper[4899]: I1003 09:35:14.900249 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7b59\" (UniqueName: \"kubernetes.io/projected/56098c86-7f51-46a3-9838-dfa07d174475-kube-api-access-m7b59\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"56098c86-7f51-46a3-9838-dfa07d174475\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 03 09:35:14 crc kubenswrapper[4899]: I1003 09:35:14.910683 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"56098c86-7f51-46a3-9838-dfa07d174475\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 03 09:35:15 crc 
kubenswrapper[4899]: I1003 09:35:15.045649 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 03 09:35:15 crc kubenswrapper[4899]: I1003 09:35:15.487491 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 03 09:35:15 crc kubenswrapper[4899]: I1003 09:35:15.745789 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"56098c86-7f51-46a3-9838-dfa07d174475","Type":"ContainerStarted","Data":"43e6f7f4adde1c3b0d0548e3ebdbba4b0d14df590b95512fa05219da8963a472"} Oct 03 09:35:16 crc kubenswrapper[4899]: I1003 09:35:16.757223 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"56098c86-7f51-46a3-9838-dfa07d174475","Type":"ContainerStarted","Data":"298427f58f0bccadb3209a0188d298d7af76f70843da3103ed046bb2b1c6f56a"} Oct 03 09:35:16 crc kubenswrapper[4899]: I1003 09:35:16.772886 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=1.7936930599999998 podStartE2EDuration="2.772867886s" podCreationTimestamp="2025-10-03 09:35:14 +0000 UTC" firstStartedPulling="2025-10-03 09:35:15.485492386 +0000 UTC m=+3289.592977339" lastFinishedPulling="2025-10-03 09:35:16.464667212 +0000 UTC m=+3290.572152165" observedRunningTime="2025-10-03 09:35:16.769800569 +0000 UTC m=+3290.877285522" watchObservedRunningTime="2025-10-03 09:35:16.772867886 +0000 UTC m=+3290.880352839" Oct 03 09:35:33 crc kubenswrapper[4899]: I1003 09:35:33.739851 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-rf2ld/must-gather-xgz5m"] Oct 03 09:35:33 crc kubenswrapper[4899]: I1003 09:35:33.742027 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rf2ld/must-gather-xgz5m" Oct 03 09:35:33 crc kubenswrapper[4899]: I1003 09:35:33.748302 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-rf2ld"/"kube-root-ca.crt" Oct 03 09:35:33 crc kubenswrapper[4899]: I1003 09:35:33.748607 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-rf2ld"/"openshift-service-ca.crt" Oct 03 09:35:33 crc kubenswrapper[4899]: I1003 09:35:33.748785 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-rf2ld"/"default-dockercfg-f5l2b" Oct 03 09:35:33 crc kubenswrapper[4899]: I1003 09:35:33.749072 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-rf2ld/must-gather-xgz5m"] Oct 03 09:35:33 crc kubenswrapper[4899]: I1003 09:35:33.841846 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77cgh\" (UniqueName: \"kubernetes.io/projected/51d13c5b-183a-42b0-a15d-156896f6154e-kube-api-access-77cgh\") pod \"must-gather-xgz5m\" (UID: \"51d13c5b-183a-42b0-a15d-156896f6154e\") " pod="openshift-must-gather-rf2ld/must-gather-xgz5m" Oct 03 09:35:33 crc kubenswrapper[4899]: I1003 09:35:33.842034 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/51d13c5b-183a-42b0-a15d-156896f6154e-must-gather-output\") pod \"must-gather-xgz5m\" (UID: \"51d13c5b-183a-42b0-a15d-156896f6154e\") " pod="openshift-must-gather-rf2ld/must-gather-xgz5m" Oct 03 09:35:33 crc kubenswrapper[4899]: I1003 09:35:33.943875 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77cgh\" (UniqueName: \"kubernetes.io/projected/51d13c5b-183a-42b0-a15d-156896f6154e-kube-api-access-77cgh\") pod \"must-gather-xgz5m\" (UID: \"51d13c5b-183a-42b0-a15d-156896f6154e\") " pod="openshift-must-gather-rf2ld/must-gather-xgz5m" Oct 03 09:35:33 crc kubenswrapper[4899]: I1003 09:35:33.945349 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/51d13c5b-183a-42b0-a15d-156896f6154e-must-gather-output\") pod \"must-gather-xgz5m\" (UID: \"51d13c5b-183a-42b0-a15d-156896f6154e\") " pod="openshift-must-gather-rf2ld/must-gather-xgz5m" Oct 03 09:35:33 crc kubenswrapper[4899]: I1003 09:35:33.945424 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/51d13c5b-183a-42b0-a15d-156896f6154e-must-gather-output\") pod \"must-gather-xgz5m\" (UID: \"51d13c5b-183a-42b0-a15d-156896f6154e\") " pod="openshift-must-gather-rf2ld/must-gather-xgz5m" Oct 03 09:35:33 crc kubenswrapper[4899]: I1003 09:35:33.963869 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77cgh\" (UniqueName: \"kubernetes.io/projected/51d13c5b-183a-42b0-a15d-156896f6154e-kube-api-access-77cgh\") pod \"must-gather-xgz5m\" (UID: \"51d13c5b-183a-42b0-a15d-156896f6154e\") " pod="openshift-must-gather-rf2ld/must-gather-xgz5m" Oct 03 09:35:34 crc kubenswrapper[4899]: I1003 09:35:34.069858 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rf2ld/must-gather-xgz5m" Oct 03 09:35:34 crc kubenswrapper[4899]: I1003 09:35:34.501741 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-rf2ld/must-gather-xgz5m"] Oct 03 09:35:34 crc kubenswrapper[4899]: W1003 09:35:34.514863 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod51d13c5b_183a_42b0_a15d_156896f6154e.slice/crio-82dc8647ee40d96d2ce594c4300b59d047cdc47ff6dfaedcd67e25325eff2da5 WatchSource:0}: Error finding container 82dc8647ee40d96d2ce594c4300b59d047cdc47ff6dfaedcd67e25325eff2da5: Status 404 returned error can't find the container with id 82dc8647ee40d96d2ce594c4300b59d047cdc47ff6dfaedcd67e25325eff2da5 Oct 03 09:35:34 crc kubenswrapper[4899]: I1003 09:35:34.908361 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rf2ld/must-gather-xgz5m" event={"ID":"51d13c5b-183a-42b0-a15d-156896f6154e","Type":"ContainerStarted","Data":"82dc8647ee40d96d2ce594c4300b59d047cdc47ff6dfaedcd67e25325eff2da5"} Oct 03 09:35:38 crc kubenswrapper[4899]: I1003 09:35:38.947017 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rf2ld/must-gather-xgz5m" event={"ID":"51d13c5b-183a-42b0-a15d-156896f6154e","Type":"ContainerStarted","Data":"779eb809609563c28b5f515a3783dc17bf8e1a6fe4d7ec6647e4d2917931eef4"} Oct 03 09:35:38 crc kubenswrapper[4899]: I1003 09:35:38.947574 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rf2ld/must-gather-xgz5m" event={"ID":"51d13c5b-183a-42b0-a15d-156896f6154e","Type":"ContainerStarted","Data":"31d6abbf481210b871a1f677c180f0af9d33f894b3224f7e28d56af84519722d"} Oct 03 09:35:38 crc kubenswrapper[4899]: I1003 09:35:38.962376 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-rf2ld/must-gather-xgz5m" podStartSLOduration=2.6954929720000003 podStartE2EDuration="5.962356s" podCreationTimestamp="2025-10-03 09:35:33 +0000 UTC" firstStartedPulling="2025-10-03 09:35:34.517055585 +0000 UTC m=+3308.624540538" lastFinishedPulling="2025-10-03 09:35:37.783918613 +0000 UTC m=+3311.891403566" observedRunningTime="2025-10-03 09:35:38.960851073 +0000 UTC m=+3313.068336026" watchObservedRunningTime="2025-10-03 09:35:38.962356 +0000 UTC m=+3313.069840953" Oct 03 09:35:41 crc kubenswrapper[4899]: I1003 09:35:41.664956 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-rf2ld/crc-debug-lwtkf"] Oct 03 09:35:41 crc kubenswrapper[4899]: I1003 09:35:41.666713 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rf2ld/crc-debug-lwtkf" Oct 03 09:35:41 crc kubenswrapper[4899]: I1003 09:35:41.791759 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5j2x\" (UniqueName: \"kubernetes.io/projected/723e953f-e2bb-4cfd-9dae-db74acb34eea-kube-api-access-g5j2x\") pod \"crc-debug-lwtkf\" (UID: \"723e953f-e2bb-4cfd-9dae-db74acb34eea\") " pod="openshift-must-gather-rf2ld/crc-debug-lwtkf" Oct 03 09:35:41 crc kubenswrapper[4899]: I1003 09:35:41.791823 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/723e953f-e2bb-4cfd-9dae-db74acb34eea-host\") pod \"crc-debug-lwtkf\" (UID: \"723e953f-e2bb-4cfd-9dae-db74acb34eea\") " pod="openshift-must-gather-rf2ld/crc-debug-lwtkf" Oct 03 09:35:41 crc kubenswrapper[4899]: I1003 09:35:41.893862 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5j2x\" (UniqueName: \"kubernetes.io/projected/723e953f-e2bb-4cfd-9dae-db74acb34eea-kube-api-access-g5j2x\") pod \"crc-debug-lwtkf\" (UID: \"723e953f-e2bb-4cfd-9dae-db74acb34eea\") " pod="openshift-must-gather-rf2ld/crc-debug-lwtkf" Oct 03 09:35:41 crc kubenswrapper[4899]: I1003 09:35:41.894039 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/723e953f-e2bb-4cfd-9dae-db74acb34eea-host\") pod \"crc-debug-lwtkf\" (UID: \"723e953f-e2bb-4cfd-9dae-db74acb34eea\") " pod="openshift-must-gather-rf2ld/crc-debug-lwtkf" Oct 03 09:35:41 crc kubenswrapper[4899]: I1003 09:35:41.894242 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/723e953f-e2bb-4cfd-9dae-db74acb34eea-host\") pod \"crc-debug-lwtkf\" (UID: \"723e953f-e2bb-4cfd-9dae-db74acb34eea\") " pod="openshift-must-gather-rf2ld/crc-debug-lwtkf" Oct 03 09:35:41 crc kubenswrapper[4899]: I1003 09:35:41.914155 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5j2x\" (UniqueName: \"kubernetes.io/projected/723e953f-e2bb-4cfd-9dae-db74acb34eea-kube-api-access-g5j2x\") pod \"crc-debug-lwtkf\" (UID: \"723e953f-e2bb-4cfd-9dae-db74acb34eea\") " pod="openshift-must-gather-rf2ld/crc-debug-lwtkf" Oct 03 09:35:41 crc kubenswrapper[4899]: I1003 09:35:41.992069 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rf2ld/crc-debug-lwtkf" Oct 03 09:35:42 crc kubenswrapper[4899]: W1003 09:35:42.029481 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod723e953f_e2bb_4cfd_9dae_db74acb34eea.slice/crio-88b49cd0a04b2d52282d1ee19cc11b121e0c1eb965b5d453bbaa4e121c007f0d WatchSource:0}: Error finding container 88b49cd0a04b2d52282d1ee19cc11b121e0c1eb965b5d453bbaa4e121c007f0d: Status 404 returned error can't find the container with id 88b49cd0a04b2d52282d1ee19cc11b121e0c1eb965b5d453bbaa4e121c007f0d Oct 03 09:35:42 crc kubenswrapper[4899]: I1003 09:35:42.988066 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rf2ld/crc-debug-lwtkf" event={"ID":"723e953f-e2bb-4cfd-9dae-db74acb34eea","Type":"ContainerStarted","Data":"88b49cd0a04b2d52282d1ee19cc11b121e0c1eb965b5d453bbaa4e121c007f0d"} Oct 03 09:35:53 crc kubenswrapper[4899]: I1003 09:35:53.116133 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rf2ld/crc-debug-lwtkf" event={"ID":"723e953f-e2bb-4cfd-9dae-db74acb34eea","Type":"ContainerStarted","Data":"d89c6ed1579bdd35e598a05a3d6f0dd4dfd86e8efdf111aeb47df2999aaccfb8"} Oct 03 09:35:53 crc kubenswrapper[4899]: I1003 09:35:53.140083 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-rf2ld/crc-debug-lwtkf" podStartSLOduration=1.399104305 podStartE2EDuration="12.140062614s" podCreationTimestamp="2025-10-03 09:35:41 +0000 UTC" firstStartedPulling="2025-10-03 09:35:42.033581722 +0000 UTC m=+3316.141066675" lastFinishedPulling="2025-10-03 09:35:52.774540031 +0000 UTC m=+3326.882024984" observedRunningTime="2025-10-03 09:35:53.133126365 +0000 UTC m=+3327.240611328" watchObservedRunningTime="2025-10-03 09:35:53.140062614 +0000 UTC m=+3327.247547567" Oct 03 09:36:12 crc kubenswrapper[4899]: I1003 09:36:12.198305 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 09:36:12 crc kubenswrapper[4899]: I1003 09:36:12.198852 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 09:36:36 crc kubenswrapper[4899]: I1003 09:36:36.170477 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-54r9k"] Oct 03 09:36:36 crc kubenswrapper[4899]: I1003 09:36:36.174221 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-54r9k" Oct 03 09:36:36 crc kubenswrapper[4899]: I1003 09:36:36.187450 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-54r9k"] Oct 03 09:36:36 crc kubenswrapper[4899]: I1003 09:36:36.296991 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdp5g\" (UniqueName: \"kubernetes.io/projected/b12b4344-46e3-4fbc-80ec-0f69507f6344-kube-api-access-jdp5g\") pod \"redhat-operators-54r9k\" (UID: \"b12b4344-46e3-4fbc-80ec-0f69507f6344\") " pod="openshift-marketplace/redhat-operators-54r9k" Oct 03 09:36:36 crc kubenswrapper[4899]: I1003 09:36:36.297249 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b12b4344-46e3-4fbc-80ec-0f69507f6344-catalog-content\") pod \"redhat-operators-54r9k\" (UID: \"b12b4344-46e3-4fbc-80ec-0f69507f6344\") " pod="openshift-marketplace/redhat-operators-54r9k" Oct 03 09:36:36 crc kubenswrapper[4899]: I1003 09:36:36.297360 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b12b4344-46e3-4fbc-80ec-0f69507f6344-utilities\") pod \"redhat-operators-54r9k\" (UID: \"b12b4344-46e3-4fbc-80ec-0f69507f6344\") " pod="openshift-marketplace/redhat-operators-54r9k" Oct 03 09:36:36 crc kubenswrapper[4899]: I1003 09:36:36.399741 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdp5g\" (UniqueName: \"kubernetes.io/projected/b12b4344-46e3-4fbc-80ec-0f69507f6344-kube-api-access-jdp5g\") pod \"redhat-operators-54r9k\" (UID: \"b12b4344-46e3-4fbc-80ec-0f69507f6344\") " pod="openshift-marketplace/redhat-operators-54r9k" Oct 03 09:36:36 crc kubenswrapper[4899]: I1003 09:36:36.399852 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b12b4344-46e3-4fbc-80ec-0f69507f6344-catalog-content\") pod \"redhat-operators-54r9k\" (UID: \"b12b4344-46e3-4fbc-80ec-0f69507f6344\") " pod="openshift-marketplace/redhat-operators-54r9k" Oct 03 09:36:36 crc kubenswrapper[4899]: I1003 09:36:36.399927 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b12b4344-46e3-4fbc-80ec-0f69507f6344-utilities\") pod \"redhat-operators-54r9k\" (UID: \"b12b4344-46e3-4fbc-80ec-0f69507f6344\") " pod="openshift-marketplace/redhat-operators-54r9k" Oct 03 09:36:36 crc kubenswrapper[4899]: I1003 09:36:36.400457 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b12b4344-46e3-4fbc-80ec-0f69507f6344-utilities\") pod \"redhat-operators-54r9k\" (UID: \"b12b4344-46e3-4fbc-80ec-0f69507f6344\") " pod="openshift-marketplace/redhat-operators-54r9k" Oct 03 09:36:36 crc kubenswrapper[4899]: I1003 09:36:36.400915 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b12b4344-46e3-4fbc-80ec-0f69507f6344-catalog-content\") pod \"redhat-operators-54r9k\" (UID: \"b12b4344-46e3-4fbc-80ec-0f69507f6344\") " pod="openshift-marketplace/redhat-operators-54r9k" Oct 03 09:36:36 crc kubenswrapper[4899]: I1003 09:36:36.435114 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-jdp5g\" (UniqueName: \"kubernetes.io/projected/b12b4344-46e3-4fbc-80ec-0f69507f6344-kube-api-access-jdp5g\") pod \"redhat-operators-54r9k\" (UID: \"b12b4344-46e3-4fbc-80ec-0f69507f6344\") " pod="openshift-marketplace/redhat-operators-54r9k" Oct 03 09:36:36 crc kubenswrapper[4899]: I1003 09:36:36.496392 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-54r9k" Oct 03 09:36:37 crc kubenswrapper[4899]: I1003 09:36:37.031816 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-54r9k"] Oct 03 09:36:37 crc kubenswrapper[4899]: I1003 09:36:37.558858 4899 generic.go:334] "Generic (PLEG): container finished" podID="b12b4344-46e3-4fbc-80ec-0f69507f6344" containerID="2d0a40a4e2c88cb84dc00b52c41bd49f8f58d1b33e9a02d7ae22eda71525fe1c" exitCode=0 Oct 03 09:36:37 crc kubenswrapper[4899]: I1003 09:36:37.559397 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-54r9k" event={"ID":"b12b4344-46e3-4fbc-80ec-0f69507f6344","Type":"ContainerDied","Data":"2d0a40a4e2c88cb84dc00b52c41bd49f8f58d1b33e9a02d7ae22eda71525fe1c"} Oct 03 09:36:37 crc kubenswrapper[4899]: I1003 09:36:37.559431 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-54r9k" event={"ID":"b12b4344-46e3-4fbc-80ec-0f69507f6344","Type":"ContainerStarted","Data":"eba05de8ab1546ea3efa4ca050a81f0ad5ebbba56b3c3c3140d16f744213272e"} Oct 03 09:36:37 crc kubenswrapper[4899]: I1003 09:36:37.574069 4899 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 09:36:38 crc kubenswrapper[4899]: I1003 09:36:38.572157 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-54r9k" event={"ID":"b12b4344-46e3-4fbc-80ec-0f69507f6344","Type":"ContainerStarted","Data":"b1c0814ae0e61c65c0dcd091db0fc2204d38be1139b07b703851dda180811e25"} Oct 03 09:36:40 crc kubenswrapper[4899]: I1003 09:36:40.591374 4899 generic.go:334] "Generic (PLEG): container finished" podID="b12b4344-46e3-4fbc-80ec-0f69507f6344" containerID="b1c0814ae0e61c65c0dcd091db0fc2204d38be1139b07b703851dda180811e25" exitCode=0 Oct 03 09:36:40 crc kubenswrapper[4899]: I1003 09:36:40.591446 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-54r9k" event={"ID":"b12b4344-46e3-4fbc-80ec-0f69507f6344","Type":"ContainerDied","Data":"b1c0814ae0e61c65c0dcd091db0fc2204d38be1139b07b703851dda180811e25"} Oct 03 09:36:41 crc kubenswrapper[4899]: I1003 09:36:41.605364 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-54r9k" event={"ID":"b12b4344-46e3-4fbc-80ec-0f69507f6344","Type":"ContainerStarted","Data":"75270dd33cde09669b0f815fefcf0cdd6f3850cede1e439552558e520faa73c4"} Oct 03 09:36:41 crc kubenswrapper[4899]: I1003 09:36:41.627078 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-54r9k" podStartSLOduration=2.105270186 podStartE2EDuration="5.627056101s" podCreationTimestamp="2025-10-03 09:36:36 +0000 UTC" firstStartedPulling="2025-10-03 09:36:37.573767722 +0000 UTC m=+3371.681252675" lastFinishedPulling="2025-10-03 09:36:41.095553637 +0000 UTC m=+3375.203038590" observedRunningTime="2025-10-03 09:36:41.626480393 +0000 UTC m=+3375.733965346" watchObservedRunningTime="2025-10-03 09:36:41.627056101 +0000 UTC m=+3375.734541054" Oct 03 09:36:42 crc 
kubenswrapper[4899]: I1003 09:36:42.198091 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 09:36:42 crc kubenswrapper[4899]: I1003 09:36:42.198481 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 09:36:45 crc kubenswrapper[4899]: I1003 09:36:45.496388 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-56dc79cc94-hbxqp_85b9210c-c4ec-4020-9137-f4b4fdf9dc51/barbican-api/0.log" Oct 03 09:36:45 crc kubenswrapper[4899]: I1003 09:36:45.563749 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-56dc79cc94-hbxqp_85b9210c-c4ec-4020-9137-f4b4fdf9dc51/barbican-api-log/0.log" Oct 03 09:36:45 crc kubenswrapper[4899]: I1003 09:36:45.739658 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-59465fcb84-kkbzz_0bd52669-a824-4b16-a840-2feed9e46a6c/barbican-keystone-listener/0.log" Oct 03 09:36:45 crc kubenswrapper[4899]: I1003 09:36:45.859455 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-59465fcb84-kkbzz_0bd52669-a824-4b16-a840-2feed9e46a6c/barbican-keystone-listener-log/0.log" Oct 03 09:36:45 crc kubenswrapper[4899]: I1003 09:36:45.913977 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5b77574dc-vm5lv_69c8959c-64e4-43a6-9b2c-133dd960fc67/barbican-worker/0.log" Oct 03 09:36:46 crc kubenswrapper[4899]: I1003 09:36:46.069666 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5b77574dc-vm5lv_69c8959c-64e4-43a6-9b2c-133dd960fc67/barbican-worker-log/0.log" Oct 03 09:36:46 crc kubenswrapper[4899]: I1003 09:36:46.232004 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb_9ad20d0d-637a-49d9-8a83-bfae0d7c2a37/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:36:46 crc kubenswrapper[4899]: I1003 09:36:46.449320 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0/ceilometer-central-agent/0.log" Oct 03 09:36:46 crc kubenswrapper[4899]: I1003 09:36:46.496983 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-54r9k" Oct 03 09:36:46 crc kubenswrapper[4899]: I1003 09:36:46.497048 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-54r9k" Oct 03 09:36:46 crc kubenswrapper[4899]: I1003 09:36:46.516013 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0/ceilometer-notification-agent/0.log" Oct 03 09:36:46 crc kubenswrapper[4899]: I1003 09:36:46.614559 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0/proxy-httpd/0.log" Oct 03 09:36:46 crc kubenswrapper[4899]: I1003 09:36:46.689177 4899 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_ceilometer-0_ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0/sg-core/0.log" Oct 03 09:36:46 crc kubenswrapper[4899]: I1003 09:36:46.845386 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_65edf39f-decc-476a-a5f3-b3d2d785ae67/cinder-api/0.log" Oct 03 09:36:46 crc kubenswrapper[4899]: I1003 09:36:46.926693 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_65edf39f-decc-476a-a5f3-b3d2d785ae67/cinder-api-log/0.log" Oct 03 09:36:47 crc kubenswrapper[4899]: I1003 09:36:47.142218 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_0d32bb24-0270-45bb-b242-0aa2517f1cf3/cinder-scheduler/0.log" Oct 03 09:36:47 crc kubenswrapper[4899]: I1003 09:36:47.299867 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_0d32bb24-0270-45bb-b242-0aa2517f1cf3/probe/0.log" Oct 03 09:36:47 crc kubenswrapper[4899]: I1003 09:36:47.510190 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-7l699_fef8b1ca-fe97-49df-9d53-89edfaa3d12a/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:36:47 crc kubenswrapper[4899]: I1003 09:36:47.551626 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-54r9k" podUID="b12b4344-46e3-4fbc-80ec-0f69507f6344" containerName="registry-server" probeResult="failure" output=< Oct 03 09:36:47 crc kubenswrapper[4899]: timeout: failed to connect service ":50051" within 1s Oct 03 09:36:47 crc kubenswrapper[4899]: > Oct 03 09:36:47 crc kubenswrapper[4899]: I1003 09:36:47.718971 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-djftw_51abfa86-5d01-4b3e-aceb-155ded93aa49/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:36:47 crc kubenswrapper[4899]: I1003 09:36:47.855442 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-rr46f_fc6f7423-a7b0-4bd0-ac84-f65eb45233b3/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:36:48 crc kubenswrapper[4899]: I1003 09:36:48.030068 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-78c64bc9c5-hbbsx_b0cbf4db-6115-4cb0-8aa1-b773b07e37e4/init/0.log" Oct 03 09:36:48 crc kubenswrapper[4899]: I1003 09:36:48.267764 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-78c64bc9c5-hbbsx_b0cbf4db-6115-4cb0-8aa1-b773b07e37e4/init/0.log" Oct 03 09:36:48 crc kubenswrapper[4899]: I1003 09:36:48.365024 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-78c64bc9c5-hbbsx_b0cbf4db-6115-4cb0-8aa1-b773b07e37e4/dnsmasq-dns/0.log" Oct 03 09:36:48 crc kubenswrapper[4899]: I1003 09:36:48.611499 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-tskjn_521016b7-078a-42dd-bec6-739da052031b/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:36:48 crc kubenswrapper[4899]: I1003 09:36:48.633849 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee/glance-httpd/0.log" Oct 03 09:36:48 crc kubenswrapper[4899]: I1003 09:36:48.852513 4899 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_glance-default-external-api-0_aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee/glance-log/0.log" Oct 03 09:36:48 crc kubenswrapper[4899]: I1003 09:36:48.899568 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_64f5461f-c255-4b93-9d86-65321f2dc74b/glance-httpd/0.log" Oct 03 09:36:49 crc kubenswrapper[4899]: I1003 09:36:49.093577 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_64f5461f-c255-4b93-9d86-65321f2dc74b/glance-log/0.log" Oct 03 09:36:49 crc kubenswrapper[4899]: I1003 09:36:49.319666 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7f5ccd89b4-5dfm2_bf908711-a33e-40be-b5a0-c82254721d41/horizon/0.log" Oct 03 09:36:49 crc kubenswrapper[4899]: I1003 09:36:49.447728 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-hr45w_3aad51f1-e1c6-4677-a00a-e81438b9650a/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:36:49 crc kubenswrapper[4899]: I1003 09:36:49.648029 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7f5ccd89b4-5dfm2_bf908711-a33e-40be-b5a0-c82254721d41/horizon-log/0.log" Oct 03 09:36:49 crc kubenswrapper[4899]: I1003 09:36:49.710852 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-bxmbr_6a1cd0af-23db-4234-b97c-e57852eaa634/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:36:50 crc kubenswrapper[4899]: I1003 09:36:50.006554 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29324701-75rw5_dad5826d-6e25-43af-9916-de4fd15faa3a/keystone-cron/0.log" Oct 03 09:36:50 crc kubenswrapper[4899]: I1003 09:36:50.025584 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-688fdbdf8c-rnx7k_36a71770-b047-4d86-96c0-2888f9258599/keystone-api/0.log" Oct 03 09:36:50 crc kubenswrapper[4899]: I1003 09:36:50.151380 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_c05f2aa3-2568-45fa-ad1c-704870317a49/kube-state-metrics/0.log" Oct 03 09:36:50 crc kubenswrapper[4899]: I1003 09:36:50.274988 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-cb474_d9001fcb-add1-41c8-9638-097229339246/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:36:50 crc kubenswrapper[4899]: I1003 09:36:50.678260 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-cdb85d7df-9hdqn_3e425bd0-71ee-4b86-a246-e31d103a8745/neutron-httpd/0.log" Oct 03 09:36:50 crc kubenswrapper[4899]: I1003 09:36:50.698969 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-cdb85d7df-9hdqn_3e425bd0-71ee-4b86-a246-e31d103a8745/neutron-api/0.log" Oct 03 09:36:50 crc kubenswrapper[4899]: I1003 09:36:50.900928 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l_9cf278a1-e80c-4739-9166-b75a8f6f3aea/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:36:51 crc kubenswrapper[4899]: I1003 09:36:51.477955 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_76c142d4-6700-4120-bd50-aaf4e1b8d5b8/nova-api-log/0.log" Oct 03 09:36:51 crc kubenswrapper[4899]: I1003 09:36:51.704062 4899 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_nova-cell0-conductor-0_4a91feca-8e5c-489a-bd2b-222f17e9b6d6/nova-cell0-conductor-conductor/0.log" Oct 03 09:36:51 crc kubenswrapper[4899]: I1003 09:36:51.752764 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_76c142d4-6700-4120-bd50-aaf4e1b8d5b8/nova-api-api/0.log" Oct 03 09:36:52 crc kubenswrapper[4899]: I1003 09:36:52.063691 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_d2f278b3-9210-4f40-96f3-1605efa157ef/nova-cell1-conductor-conductor/0.log" Oct 03 09:36:52 crc kubenswrapper[4899]: I1003 09:36:52.123452 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_0bdcd25b-9c6e-40d6-82d4-6af348b37c1a/nova-cell1-novncproxy-novncproxy/0.log" Oct 03 09:36:52 crc kubenswrapper[4899]: I1003 09:36:52.406200 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-kmjd7_0bc97030-8da8-4cd2-8645-9962d50b08d3/nova-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:36:52 crc kubenswrapper[4899]: I1003 09:36:52.565341 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_4c14a82a-78c7-4366-a2bc-91e1f880d841/nova-metadata-log/0.log" Oct 03 09:36:53 crc kubenswrapper[4899]: I1003 09:36:53.036052 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_6969051c-bc07-454c-b958-b9e203f95ee5/nova-scheduler-scheduler/0.log" Oct 03 09:36:53 crc kubenswrapper[4899]: I1003 09:36:53.283709 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_b466ebf1-ec52-4c92-8ea9-f0f329c6ab93/mysql-bootstrap/0.log" Oct 03 09:36:53 crc kubenswrapper[4899]: I1003 09:36:53.459725 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_b466ebf1-ec52-4c92-8ea9-f0f329c6ab93/mysql-bootstrap/0.log" Oct 03 09:36:53 crc kubenswrapper[4899]: I1003 09:36:53.536490 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_b466ebf1-ec52-4c92-8ea9-f0f329c6ab93/galera/0.log" Oct 03 09:36:53 crc kubenswrapper[4899]: I1003 09:36:53.843624 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_9ec49b55-9814-4053-a0dd-eda5b7f7995a/mysql-bootstrap/0.log" Oct 03 09:36:53 crc kubenswrapper[4899]: I1003 09:36:53.961498 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_4c14a82a-78c7-4366-a2bc-91e1f880d841/nova-metadata-metadata/0.log" Oct 03 09:36:54 crc kubenswrapper[4899]: I1003 09:36:54.027180 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_9ec49b55-9814-4053-a0dd-eda5b7f7995a/mysql-bootstrap/0.log" Oct 03 09:36:54 crc kubenswrapper[4899]: I1003 09:36:54.119258 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_9ec49b55-9814-4053-a0dd-eda5b7f7995a/galera/0.log" Oct 03 09:36:54 crc kubenswrapper[4899]: I1003 09:36:54.261230 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_1c6ed6e6-287d-4267-9cfd-b7b554691da8/openstackclient/0.log" Oct 03 09:36:54 crc kubenswrapper[4899]: I1003 09:36:54.481363 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-w992v_78c399de-31f5-439f-8f0b-24c8dba1875e/openstack-network-exporter/0.log" Oct 03 09:36:54 crc kubenswrapper[4899]: I1003 09:36:54.644333 4899 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-c7dff_a14a6054-9c4a-414f-ab4e-b0732e33ce1c/ovsdb-server-init/0.log" Oct 03 09:36:54 crc kubenswrapper[4899]: I1003 09:36:54.817565 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-c7dff_a14a6054-9c4a-414f-ab4e-b0732e33ce1c/ovsdb-server-init/0.log" Oct 03 09:36:54 crc kubenswrapper[4899]: I1003 09:36:54.835414 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-c7dff_a14a6054-9c4a-414f-ab4e-b0732e33ce1c/ovs-vswitchd/0.log" Oct 03 09:36:54 crc kubenswrapper[4899]: I1003 09:36:54.863391 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-c7dff_a14a6054-9c4a-414f-ab4e-b0732e33ce1c/ovsdb-server/0.log" Oct 03 09:36:55 crc kubenswrapper[4899]: I1003 09:36:55.029670 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-wfz45_89364578-24ad-4c19-8e0b-ba123f58f4eb/ovn-controller/0.log" Oct 03 09:36:55 crc kubenswrapper[4899]: I1003 09:36:55.310956 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-2swmz_2d954cfa-e3a2-4fc0-a1af-61234475db07/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:36:55 crc kubenswrapper[4899]: I1003 09:36:55.413431 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_71e79cb5-28f4-4102-892e-479502ff4db9/openstack-network-exporter/0.log" Oct 03 09:36:55 crc kubenswrapper[4899]: I1003 09:36:55.536315 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_71e79cb5-28f4-4102-892e-479502ff4db9/ovn-northd/0.log" Oct 03 09:36:55 crc kubenswrapper[4899]: I1003 09:36:55.639556 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_6ad382d6-f0b8-43b2-aeea-98ace59fb6cf/openstack-network-exporter/0.log" Oct 03 09:36:55 crc kubenswrapper[4899]: I1003 09:36:55.766186 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_6ad382d6-f0b8-43b2-aeea-98ace59fb6cf/ovsdbserver-nb/0.log" Oct 03 09:36:55 crc kubenswrapper[4899]: I1003 09:36:55.843253 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_3e11e1a1-7b97-4717-85bc-834b214d4526/openstack-network-exporter/0.log" Oct 03 09:36:55 crc kubenswrapper[4899]: I1003 09:36:55.966741 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_3e11e1a1-7b97-4717-85bc-834b214d4526/ovsdbserver-sb/0.log" Oct 03 09:36:56 crc kubenswrapper[4899]: I1003 09:36:56.131754 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-568fd9848b-bw6ch_73b0bcea-efbe-4c62-b97c-031ea8fee918/placement-api/0.log" Oct 03 09:36:56 crc kubenswrapper[4899]: I1003 09:36:56.266490 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-568fd9848b-bw6ch_73b0bcea-efbe-4c62-b97c-031ea8fee918/placement-log/0.log" Oct 03 09:36:56 crc kubenswrapper[4899]: I1003 09:36:56.341330 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_75734f37-27af-4b79-ac28-4546a092e218/setup-container/0.log" Oct 03 09:36:56 crc kubenswrapper[4899]: I1003 09:36:56.580903 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_75734f37-27af-4b79-ac28-4546a092e218/setup-container/0.log" Oct 03 09:36:56 crc kubenswrapper[4899]: I1003 09:36:56.585475 
4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_75734f37-27af-4b79-ac28-4546a092e218/rabbitmq/0.log" Oct 03 09:36:56 crc kubenswrapper[4899]: I1003 09:36:56.834967 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7415d874-aa51-4fc4-8b40-b487392c248c/setup-container/0.log" Oct 03 09:36:56 crc kubenswrapper[4899]: I1003 09:36:56.995396 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7415d874-aa51-4fc4-8b40-b487392c248c/rabbitmq/0.log" Oct 03 09:36:57 crc kubenswrapper[4899]: I1003 09:36:57.086481 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7415d874-aa51-4fc4-8b40-b487392c248c/setup-container/0.log" Oct 03 09:36:57 crc kubenswrapper[4899]: I1003 09:36:57.258612 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-df25h_23483d3a-dd9e-4fcd-81a6-465936a69838/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:36:57 crc kubenswrapper[4899]: I1003 09:36:57.390491 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-9rwqd_f5dcd890-e7fa-4739-919b-6b1b77ff741a/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:36:57 crc kubenswrapper[4899]: I1003 09:36:57.552975 4899 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-54r9k" podUID="b12b4344-46e3-4fbc-80ec-0f69507f6344" containerName="registry-server" probeResult="failure" output=< Oct 03 09:36:57 crc kubenswrapper[4899]: timeout: failed to connect service ":50051" within 1s Oct 03 09:36:57 crc kubenswrapper[4899]: > Oct 03 09:36:57 crc kubenswrapper[4899]: I1003 09:36:57.591912 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp_b5ae39e5-0bf8-4627-8e97-7162c0861524/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:36:57 crc kubenswrapper[4899]: I1003 09:36:57.736746 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-t58bp_ec00fd0d-26af-42f1-afdd-a21d668719d5/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:36:57 crc kubenswrapper[4899]: I1003 09:36:57.897765 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-4qh2m_f95e0dc0-dc93-4ec2-ae14-96f3641e651a/ssh-known-hosts-edpm-deployment/0.log" Oct 03 09:36:58 crc kubenswrapper[4899]: I1003 09:36:58.101112 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-54794d7d5c-64vlg_088c667d-5a03-44d1-a2fc-c9de7910e5a8/proxy-server/0.log" Oct 03 09:36:58 crc kubenswrapper[4899]: I1003 09:36:58.253597 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-54794d7d5c-64vlg_088c667d-5a03-44d1-a2fc-c9de7910e5a8/proxy-httpd/0.log" Oct 03 09:36:58 crc kubenswrapper[4899]: I1003 09:36:58.366104 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-s5xff_79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca/swift-ring-rebalance/0.log" Oct 03 09:36:58 crc kubenswrapper[4899]: I1003 09:36:58.531181 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/account-auditor/0.log" Oct 03 09:36:58 crc kubenswrapper[4899]: I1003 09:36:58.612802 4899 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/account-reaper/0.log" Oct 03 09:36:58 crc kubenswrapper[4899]: I1003 09:36:58.727343 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/account-replicator/0.log" Oct 03 09:36:58 crc kubenswrapper[4899]: I1003 09:36:58.738770 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/account-server/0.log" Oct 03 09:36:58 crc kubenswrapper[4899]: I1003 09:36:58.854201 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/container-auditor/0.log" Oct 03 09:36:58 crc kubenswrapper[4899]: I1003 09:36:58.952332 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/container-replicator/0.log" Oct 03 09:36:58 crc kubenswrapper[4899]: I1003 09:36:58.959249 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/container-server/0.log" Oct 03 09:36:59 crc kubenswrapper[4899]: I1003 09:36:59.115745 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/container-updater/0.log" Oct 03 09:36:59 crc kubenswrapper[4899]: I1003 09:36:59.190554 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/object-auditor/0.log" Oct 03 09:36:59 crc kubenswrapper[4899]: I1003 09:36:59.203558 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/object-expirer/0.log" Oct 03 09:36:59 crc kubenswrapper[4899]: I1003 09:36:59.369872 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/object-replicator/0.log" Oct 03 09:36:59 crc kubenswrapper[4899]: I1003 09:36:59.420164 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/object-server/0.log" Oct 03 09:36:59 crc kubenswrapper[4899]: I1003 09:36:59.538077 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/object-updater/0.log" Oct 03 09:36:59 crc kubenswrapper[4899]: I1003 09:36:59.587785 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/rsync/0.log" Oct 03 09:36:59 crc kubenswrapper[4899]: I1003 09:36:59.621404 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/swift-recon-cron/0.log" Oct 03 09:36:59 crc kubenswrapper[4899]: I1003 09:36:59.879755 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-kw265_78460eaf-b283-4155-be7c-57230376bbcc/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:37:00 crc kubenswrapper[4899]: I1003 09:37:00.111683 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_39b95e3c-c5c3-44c9-a89f-490bcde4fc69/tempest-tests-tempest-tests-runner/0.log" Oct 03 09:37:00 crc kubenswrapper[4899]: I1003 09:37:00.123612 4899 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_56098c86-7f51-46a3-9838-dfa07d174475/test-operator-logs-container/0.log" Oct 03 09:37:00 crc kubenswrapper[4899]: I1003 09:37:00.368605 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k_fea4a390-4920-4967-9e2a-152d46f212a3/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:37:06 crc kubenswrapper[4899]: I1003 09:37:06.550378 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-54r9k" Oct 03 09:37:06 crc kubenswrapper[4899]: I1003 09:37:06.604628 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-54r9k" Oct 03 09:37:06 crc kubenswrapper[4899]: I1003 09:37:06.988446 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_231e3958-c17a-4f0b-a83e-4801b497b942/memcached/0.log" Oct 03 09:37:07 crc kubenswrapper[4899]: I1003 09:37:07.378078 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-54r9k"] Oct 03 09:37:07 crc kubenswrapper[4899]: I1003 09:37:07.858876 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-54r9k" podUID="b12b4344-46e3-4fbc-80ec-0f69507f6344" containerName="registry-server" containerID="cri-o://75270dd33cde09669b0f815fefcf0cdd6f3850cede1e439552558e520faa73c4" gracePeriod=2 Oct 03 09:37:08 crc kubenswrapper[4899]: I1003 09:37:08.410176 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-54r9k" Oct 03 09:37:08 crc kubenswrapper[4899]: I1003 09:37:08.477835 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b12b4344-46e3-4fbc-80ec-0f69507f6344-utilities\") pod \"b12b4344-46e3-4fbc-80ec-0f69507f6344\" (UID: \"b12b4344-46e3-4fbc-80ec-0f69507f6344\") " Oct 03 09:37:08 crc kubenswrapper[4899]: I1003 09:37:08.478098 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jdp5g\" (UniqueName: \"kubernetes.io/projected/b12b4344-46e3-4fbc-80ec-0f69507f6344-kube-api-access-jdp5g\") pod \"b12b4344-46e3-4fbc-80ec-0f69507f6344\" (UID: \"b12b4344-46e3-4fbc-80ec-0f69507f6344\") " Oct 03 09:37:08 crc kubenswrapper[4899]: I1003 09:37:08.478149 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b12b4344-46e3-4fbc-80ec-0f69507f6344-catalog-content\") pod \"b12b4344-46e3-4fbc-80ec-0f69507f6344\" (UID: \"b12b4344-46e3-4fbc-80ec-0f69507f6344\") " Oct 03 09:37:08 crc kubenswrapper[4899]: I1003 09:37:08.480552 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b12b4344-46e3-4fbc-80ec-0f69507f6344-utilities" (OuterVolumeSpecName: "utilities") pod "b12b4344-46e3-4fbc-80ec-0f69507f6344" (UID: "b12b4344-46e3-4fbc-80ec-0f69507f6344"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:37:08 crc kubenswrapper[4899]: I1003 09:37:08.502159 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b12b4344-46e3-4fbc-80ec-0f69507f6344-kube-api-access-jdp5g" (OuterVolumeSpecName: "kube-api-access-jdp5g") pod "b12b4344-46e3-4fbc-80ec-0f69507f6344" (UID: "b12b4344-46e3-4fbc-80ec-0f69507f6344"). InnerVolumeSpecName "kube-api-access-jdp5g". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:37:08 crc kubenswrapper[4899]: I1003 09:37:08.579800 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b12b4344-46e3-4fbc-80ec-0f69507f6344-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b12b4344-46e3-4fbc-80ec-0f69507f6344" (UID: "b12b4344-46e3-4fbc-80ec-0f69507f6344"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:37:08 crc kubenswrapper[4899]: I1003 09:37:08.581534 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jdp5g\" (UniqueName: \"kubernetes.io/projected/b12b4344-46e3-4fbc-80ec-0f69507f6344-kube-api-access-jdp5g\") on node \"crc\" DevicePath \"\"" Oct 03 09:37:08 crc kubenswrapper[4899]: I1003 09:37:08.581570 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b12b4344-46e3-4fbc-80ec-0f69507f6344-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 09:37:08 crc kubenswrapper[4899]: I1003 09:37:08.581580 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b12b4344-46e3-4fbc-80ec-0f69507f6344-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 09:37:08 crc kubenswrapper[4899]: I1003 09:37:08.876689 4899 generic.go:334] "Generic (PLEG): container finished" podID="b12b4344-46e3-4fbc-80ec-0f69507f6344" containerID="75270dd33cde09669b0f815fefcf0cdd6f3850cede1e439552558e520faa73c4" exitCode=0 Oct 03 09:37:08 crc kubenswrapper[4899]: I1003 09:37:08.876747 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-54r9k" event={"ID":"b12b4344-46e3-4fbc-80ec-0f69507f6344","Type":"ContainerDied","Data":"75270dd33cde09669b0f815fefcf0cdd6f3850cede1e439552558e520faa73c4"} Oct 03 09:37:08 crc kubenswrapper[4899]: I1003 09:37:08.876785 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-54r9k" event={"ID":"b12b4344-46e3-4fbc-80ec-0f69507f6344","Type":"ContainerDied","Data":"eba05de8ab1546ea3efa4ca050a81f0ad5ebbba56b3c3c3140d16f744213272e"} Oct 03 09:37:08 crc kubenswrapper[4899]: I1003 09:37:08.876811 4899 scope.go:117] "RemoveContainer" containerID="75270dd33cde09669b0f815fefcf0cdd6f3850cede1e439552558e520faa73c4" Oct 03 09:37:08 crc kubenswrapper[4899]: I1003 09:37:08.877023 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-54r9k" Oct 03 09:37:08 crc kubenswrapper[4899]: I1003 09:37:08.907953 4899 scope.go:117] "RemoveContainer" containerID="b1c0814ae0e61c65c0dcd091db0fc2204d38be1139b07b703851dda180811e25" Oct 03 09:37:08 crc kubenswrapper[4899]: I1003 09:37:08.916149 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-54r9k"] Oct 03 09:37:08 crc kubenswrapper[4899]: I1003 09:37:08.930957 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-54r9k"] Oct 03 09:37:08 crc kubenswrapper[4899]: I1003 09:37:08.933140 4899 scope.go:117] "RemoveContainer" containerID="2d0a40a4e2c88cb84dc00b52c41bd49f8f58d1b33e9a02d7ae22eda71525fe1c" Oct 03 09:37:08 crc kubenswrapper[4899]: I1003 09:37:08.984415 4899 scope.go:117] "RemoveContainer" containerID="75270dd33cde09669b0f815fefcf0cdd6f3850cede1e439552558e520faa73c4" Oct 03 09:37:08 crc kubenswrapper[4899]: E1003 09:37:08.984988 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75270dd33cde09669b0f815fefcf0cdd6f3850cede1e439552558e520faa73c4\": container with ID starting with 75270dd33cde09669b0f815fefcf0cdd6f3850cede1e439552558e520faa73c4 not found: ID does not exist" containerID="75270dd33cde09669b0f815fefcf0cdd6f3850cede1e439552558e520faa73c4" Oct 03 09:37:08 crc kubenswrapper[4899]: I1003 09:37:08.985086 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75270dd33cde09669b0f815fefcf0cdd6f3850cede1e439552558e520faa73c4"} err="failed to get container status \"75270dd33cde09669b0f815fefcf0cdd6f3850cede1e439552558e520faa73c4\": rpc error: code = NotFound desc = could not find container \"75270dd33cde09669b0f815fefcf0cdd6f3850cede1e439552558e520faa73c4\": container with ID starting with 75270dd33cde09669b0f815fefcf0cdd6f3850cede1e439552558e520faa73c4 not found: ID does not exist" Oct 03 09:37:08 crc kubenswrapper[4899]: I1003 09:37:08.985183 4899 scope.go:117] "RemoveContainer" containerID="b1c0814ae0e61c65c0dcd091db0fc2204d38be1139b07b703851dda180811e25" Oct 03 09:37:08 crc kubenswrapper[4899]: E1003 09:37:08.986195 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1c0814ae0e61c65c0dcd091db0fc2204d38be1139b07b703851dda180811e25\": container with ID starting with b1c0814ae0e61c65c0dcd091db0fc2204d38be1139b07b703851dda180811e25 not found: ID does not exist" containerID="b1c0814ae0e61c65c0dcd091db0fc2204d38be1139b07b703851dda180811e25" Oct 03 09:37:08 crc kubenswrapper[4899]: I1003 09:37:08.986229 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1c0814ae0e61c65c0dcd091db0fc2204d38be1139b07b703851dda180811e25"} err="failed to get container status \"b1c0814ae0e61c65c0dcd091db0fc2204d38be1139b07b703851dda180811e25\": rpc error: code = NotFound desc = could not find container \"b1c0814ae0e61c65c0dcd091db0fc2204d38be1139b07b703851dda180811e25\": container with ID starting with b1c0814ae0e61c65c0dcd091db0fc2204d38be1139b07b703851dda180811e25 not found: ID does not exist" Oct 03 09:37:08 crc kubenswrapper[4899]: I1003 09:37:08.986248 4899 scope.go:117] "RemoveContainer" containerID="2d0a40a4e2c88cb84dc00b52c41bd49f8f58d1b33e9a02d7ae22eda71525fe1c" Oct 03 09:37:08 crc kubenswrapper[4899]: E1003 09:37:08.994596 4899 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"2d0a40a4e2c88cb84dc00b52c41bd49f8f58d1b33e9a02d7ae22eda71525fe1c\": container with ID starting with 2d0a40a4e2c88cb84dc00b52c41bd49f8f58d1b33e9a02d7ae22eda71525fe1c not found: ID does not exist" containerID="2d0a40a4e2c88cb84dc00b52c41bd49f8f58d1b33e9a02d7ae22eda71525fe1c" Oct 03 09:37:08 crc kubenswrapper[4899]: I1003 09:37:08.994654 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d0a40a4e2c88cb84dc00b52c41bd49f8f58d1b33e9a02d7ae22eda71525fe1c"} err="failed to get container status \"2d0a40a4e2c88cb84dc00b52c41bd49f8f58d1b33e9a02d7ae22eda71525fe1c\": rpc error: code = NotFound desc = could not find container \"2d0a40a4e2c88cb84dc00b52c41bd49f8f58d1b33e9a02d7ae22eda71525fe1c\": container with ID starting with 2d0a40a4e2c88cb84dc00b52c41bd49f8f58d1b33e9a02d7ae22eda71525fe1c not found: ID does not exist" Oct 03 09:37:10 crc kubenswrapper[4899]: I1003 09:37:10.538245 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b12b4344-46e3-4fbc-80ec-0f69507f6344" path="/var/lib/kubelet/pods/b12b4344-46e3-4fbc-80ec-0f69507f6344/volumes" Oct 03 09:37:12 crc kubenswrapper[4899]: I1003 09:37:12.198359 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 09:37:12 crc kubenswrapper[4899]: I1003 09:37:12.199360 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 09:37:12 crc kubenswrapper[4899]: I1003 09:37:12.199533 4899 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 09:37:12 crc kubenswrapper[4899]: I1003 09:37:12.200576 4899 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"512c2485fabb512b76ede2aa8b2cf298ff6f84b744dde89782c0144994984843"} pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 09:37:12 crc kubenswrapper[4899]: I1003 09:37:12.200763 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" containerID="cri-o://512c2485fabb512b76ede2aa8b2cf298ff6f84b744dde89782c0144994984843" gracePeriod=600 Oct 03 09:37:12 crc kubenswrapper[4899]: I1003 09:37:12.922544 4899 generic.go:334] "Generic (PLEG): container finished" podID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerID="512c2485fabb512b76ede2aa8b2cf298ff6f84b744dde89782c0144994984843" exitCode=0 Oct 03 09:37:12 crc kubenswrapper[4899]: I1003 09:37:12.922624 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerDied","Data":"512c2485fabb512b76ede2aa8b2cf298ff6f84b744dde89782c0144994984843"} 
Oct 03 09:37:12 crc kubenswrapper[4899]: I1003 09:37:12.923074 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerStarted","Data":"b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba"} Oct 03 09:37:12 crc kubenswrapper[4899]: I1003 09:37:12.923098 4899 scope.go:117] "RemoveContainer" containerID="f8682a325afc085341d64b7745ad4eda585aeddac6e31da445da012ce2e8a193" Oct 03 09:37:52 crc kubenswrapper[4899]: I1003 09:37:52.279286 4899 generic.go:334] "Generic (PLEG): container finished" podID="723e953f-e2bb-4cfd-9dae-db74acb34eea" containerID="d89c6ed1579bdd35e598a05a3d6f0dd4dfd86e8efdf111aeb47df2999aaccfb8" exitCode=0 Oct 03 09:37:52 crc kubenswrapper[4899]: I1003 09:37:52.279396 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rf2ld/crc-debug-lwtkf" event={"ID":"723e953f-e2bb-4cfd-9dae-db74acb34eea","Type":"ContainerDied","Data":"d89c6ed1579bdd35e598a05a3d6f0dd4dfd86e8efdf111aeb47df2999aaccfb8"} Oct 03 09:37:53 crc kubenswrapper[4899]: I1003 09:37:53.400091 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rf2ld/crc-debug-lwtkf" Oct 03 09:37:53 crc kubenswrapper[4899]: I1003 09:37:53.435341 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-rf2ld/crc-debug-lwtkf"] Oct 03 09:37:53 crc kubenswrapper[4899]: I1003 09:37:53.443163 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-rf2ld/crc-debug-lwtkf"] Oct 03 09:37:53 crc kubenswrapper[4899]: I1003 09:37:53.522055 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g5j2x\" (UniqueName: \"kubernetes.io/projected/723e953f-e2bb-4cfd-9dae-db74acb34eea-kube-api-access-g5j2x\") pod \"723e953f-e2bb-4cfd-9dae-db74acb34eea\" (UID: \"723e953f-e2bb-4cfd-9dae-db74acb34eea\") " Oct 03 09:37:53 crc kubenswrapper[4899]: I1003 09:37:53.522181 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/723e953f-e2bb-4cfd-9dae-db74acb34eea-host\") pod \"723e953f-e2bb-4cfd-9dae-db74acb34eea\" (UID: \"723e953f-e2bb-4cfd-9dae-db74acb34eea\") " Oct 03 09:37:53 crc kubenswrapper[4899]: I1003 09:37:53.522556 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/723e953f-e2bb-4cfd-9dae-db74acb34eea-host" (OuterVolumeSpecName: "host") pod "723e953f-e2bb-4cfd-9dae-db74acb34eea" (UID: "723e953f-e2bb-4cfd-9dae-db74acb34eea"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 09:37:53 crc kubenswrapper[4899]: I1003 09:37:53.528438 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/723e953f-e2bb-4cfd-9dae-db74acb34eea-kube-api-access-g5j2x" (OuterVolumeSpecName: "kube-api-access-g5j2x") pod "723e953f-e2bb-4cfd-9dae-db74acb34eea" (UID: "723e953f-e2bb-4cfd-9dae-db74acb34eea"). InnerVolumeSpecName "kube-api-access-g5j2x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:37:53 crc kubenswrapper[4899]: I1003 09:37:53.624495 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g5j2x\" (UniqueName: \"kubernetes.io/projected/723e953f-e2bb-4cfd-9dae-db74acb34eea-kube-api-access-g5j2x\") on node \"crc\" DevicePath \"\"" Oct 03 09:37:53 crc kubenswrapper[4899]: I1003 09:37:53.624667 4899 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/723e953f-e2bb-4cfd-9dae-db74acb34eea-host\") on node \"crc\" DevicePath \"\"" Oct 03 09:37:54 crc kubenswrapper[4899]: I1003 09:37:54.296915 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88b49cd0a04b2d52282d1ee19cc11b121e0c1eb965b5d453bbaa4e121c007f0d" Oct 03 09:37:54 crc kubenswrapper[4899]: I1003 09:37:54.296962 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rf2ld/crc-debug-lwtkf" Oct 03 09:37:54 crc kubenswrapper[4899]: E1003 09:37:54.507168 4899 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod723e953f_e2bb_4cfd_9dae_db74acb34eea.slice/crio-88b49cd0a04b2d52282d1ee19cc11b121e0c1eb965b5d453bbaa4e121c007f0d\": RecentStats: unable to find data in memory cache]" Oct 03 09:37:54 crc kubenswrapper[4899]: I1003 09:37:54.543813 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="723e953f-e2bb-4cfd-9dae-db74acb34eea" path="/var/lib/kubelet/pods/723e953f-e2bb-4cfd-9dae-db74acb34eea/volumes" Oct 03 09:37:54 crc kubenswrapper[4899]: I1003 09:37:54.614103 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-rf2ld/crc-debug-kn855"] Oct 03 09:37:54 crc kubenswrapper[4899]: E1003 09:37:54.614547 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="723e953f-e2bb-4cfd-9dae-db74acb34eea" containerName="container-00" Oct 03 09:37:54 crc kubenswrapper[4899]: I1003 09:37:54.614567 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="723e953f-e2bb-4cfd-9dae-db74acb34eea" containerName="container-00" Oct 03 09:37:54 crc kubenswrapper[4899]: E1003 09:37:54.614594 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b12b4344-46e3-4fbc-80ec-0f69507f6344" containerName="extract-content" Oct 03 09:37:54 crc kubenswrapper[4899]: I1003 09:37:54.614603 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="b12b4344-46e3-4fbc-80ec-0f69507f6344" containerName="extract-content" Oct 03 09:37:54 crc kubenswrapper[4899]: E1003 09:37:54.614618 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b12b4344-46e3-4fbc-80ec-0f69507f6344" containerName="registry-server" Oct 03 09:37:54 crc kubenswrapper[4899]: I1003 09:37:54.614626 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="b12b4344-46e3-4fbc-80ec-0f69507f6344" containerName="registry-server" Oct 03 09:37:54 crc kubenswrapper[4899]: E1003 09:37:54.614652 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b12b4344-46e3-4fbc-80ec-0f69507f6344" containerName="extract-utilities" Oct 03 09:37:54 crc kubenswrapper[4899]: I1003 09:37:54.614661 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="b12b4344-46e3-4fbc-80ec-0f69507f6344" containerName="extract-utilities" Oct 03 09:37:54 crc kubenswrapper[4899]: I1003 09:37:54.614875 4899 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="723e953f-e2bb-4cfd-9dae-db74acb34eea" containerName="container-00" Oct 03 09:37:54 crc kubenswrapper[4899]: I1003 09:37:54.614924 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="b12b4344-46e3-4fbc-80ec-0f69507f6344" containerName="registry-server" Oct 03 09:37:54 crc kubenswrapper[4899]: I1003 09:37:54.615559 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rf2ld/crc-debug-kn855" Oct 03 09:37:54 crc kubenswrapper[4899]: I1003 09:37:54.742354 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5-host\") pod \"crc-debug-kn855\" (UID: \"5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5\") " pod="openshift-must-gather-rf2ld/crc-debug-kn855" Oct 03 09:37:54 crc kubenswrapper[4899]: I1003 09:37:54.742808 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktczp\" (UniqueName: \"kubernetes.io/projected/5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5-kube-api-access-ktczp\") pod \"crc-debug-kn855\" (UID: \"5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5\") " pod="openshift-must-gather-rf2ld/crc-debug-kn855" Oct 03 09:37:54 crc kubenswrapper[4899]: I1003 09:37:54.843931 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktczp\" (UniqueName: \"kubernetes.io/projected/5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5-kube-api-access-ktczp\") pod \"crc-debug-kn855\" (UID: \"5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5\") " pod="openshift-must-gather-rf2ld/crc-debug-kn855" Oct 03 09:37:54 crc kubenswrapper[4899]: I1003 09:37:54.844078 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5-host\") pod \"crc-debug-kn855\" (UID: \"5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5\") " pod="openshift-must-gather-rf2ld/crc-debug-kn855" Oct 03 09:37:54 crc kubenswrapper[4899]: I1003 09:37:54.844264 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5-host\") pod \"crc-debug-kn855\" (UID: \"5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5\") " pod="openshift-must-gather-rf2ld/crc-debug-kn855" Oct 03 09:37:54 crc kubenswrapper[4899]: I1003 09:37:54.866827 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktczp\" (UniqueName: \"kubernetes.io/projected/5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5-kube-api-access-ktczp\") pod \"crc-debug-kn855\" (UID: \"5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5\") " pod="openshift-must-gather-rf2ld/crc-debug-kn855" Oct 03 09:37:54 crc kubenswrapper[4899]: I1003 09:37:54.932587 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rf2ld/crc-debug-kn855" Oct 03 09:37:55 crc kubenswrapper[4899]: I1003 09:37:55.321364 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rf2ld/crc-debug-kn855" event={"ID":"5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5","Type":"ContainerStarted","Data":"bea858ebef3e3c09bb81c722f846b5c18022102a2e4273f5976661e00726108c"} Oct 03 09:37:55 crc kubenswrapper[4899]: I1003 09:37:55.321643 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rf2ld/crc-debug-kn855" event={"ID":"5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5","Type":"ContainerStarted","Data":"68c7141443a5a6525eaa894d13e4599a5ce1667138cc7ced5d5483d0a37d9423"} Oct 03 09:37:55 crc kubenswrapper[4899]: I1003 09:37:55.338203 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-rf2ld/crc-debug-kn855" podStartSLOduration=1.3381822190000001 podStartE2EDuration="1.338182219s" podCreationTimestamp="2025-10-03 09:37:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 09:37:55.334203333 +0000 UTC m=+3449.441688296" watchObservedRunningTime="2025-10-03 09:37:55.338182219 +0000 UTC m=+3449.445667172" Oct 03 09:37:56 crc kubenswrapper[4899]: I1003 09:37:56.335134 4899 generic.go:334] "Generic (PLEG): container finished" podID="5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5" containerID="bea858ebef3e3c09bb81c722f846b5c18022102a2e4273f5976661e00726108c" exitCode=0 Oct 03 09:37:56 crc kubenswrapper[4899]: I1003 09:37:56.335173 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rf2ld/crc-debug-kn855" event={"ID":"5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5","Type":"ContainerDied","Data":"bea858ebef3e3c09bb81c722f846b5c18022102a2e4273f5976661e00726108c"} Oct 03 09:37:57 crc kubenswrapper[4899]: I1003 09:37:57.454116 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rf2ld/crc-debug-kn855" Oct 03 09:37:57 crc kubenswrapper[4899]: I1003 09:37:57.494872 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktczp\" (UniqueName: \"kubernetes.io/projected/5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5-kube-api-access-ktczp\") pod \"5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5\" (UID: \"5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5\") " Oct 03 09:37:57 crc kubenswrapper[4899]: I1003 09:37:57.494974 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5-host\") pod \"5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5\" (UID: \"5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5\") " Oct 03 09:37:57 crc kubenswrapper[4899]: I1003 09:37:57.495165 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5-host" (OuterVolumeSpecName: "host") pod "5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5" (UID: "5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 09:37:57 crc kubenswrapper[4899]: I1003 09:37:57.495687 4899 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5-host\") on node \"crc\" DevicePath \"\"" Oct 03 09:37:57 crc kubenswrapper[4899]: I1003 09:37:57.505283 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5-kube-api-access-ktczp" (OuterVolumeSpecName: "kube-api-access-ktczp") pod "5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5" (UID: "5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5"). InnerVolumeSpecName "kube-api-access-ktczp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:37:57 crc kubenswrapper[4899]: I1003 09:37:57.600187 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktczp\" (UniqueName: \"kubernetes.io/projected/5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5-kube-api-access-ktczp\") on node \"crc\" DevicePath \"\"" Oct 03 09:37:58 crc kubenswrapper[4899]: I1003 09:37:58.359427 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rf2ld/crc-debug-kn855" event={"ID":"5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5","Type":"ContainerDied","Data":"68c7141443a5a6525eaa894d13e4599a5ce1667138cc7ced5d5483d0a37d9423"} Oct 03 09:37:58 crc kubenswrapper[4899]: I1003 09:37:58.359728 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="68c7141443a5a6525eaa894d13e4599a5ce1667138cc7ced5d5483d0a37d9423" Oct 03 09:37:58 crc kubenswrapper[4899]: I1003 09:37:58.359511 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rf2ld/crc-debug-kn855" Oct 03 09:38:02 crc kubenswrapper[4899]: I1003 09:38:02.001423 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-rf2ld/crc-debug-kn855"] Oct 03 09:38:02 crc kubenswrapper[4899]: I1003 09:38:02.011254 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-rf2ld/crc-debug-kn855"] Oct 03 09:38:02 crc kubenswrapper[4899]: I1003 09:38:02.540098 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5" path="/var/lib/kubelet/pods/5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5/volumes" Oct 03 09:38:03 crc kubenswrapper[4899]: I1003 09:38:03.154713 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-rf2ld/crc-debug-84hcc"] Oct 03 09:38:03 crc kubenswrapper[4899]: E1003 09:38:03.155267 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5" containerName="container-00" Oct 03 09:38:03 crc kubenswrapper[4899]: I1003 09:38:03.155285 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5" containerName="container-00" Oct 03 09:38:03 crc kubenswrapper[4899]: I1003 09:38:03.155528 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b2e8e59-ab68-4e31-82f6-d0cb7d5cb2a5" containerName="container-00" Oct 03 09:38:03 crc kubenswrapper[4899]: I1003 09:38:03.156290 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rf2ld/crc-debug-84hcc" Oct 03 09:38:03 crc kubenswrapper[4899]: I1003 09:38:03.290677 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gmgk\" (UniqueName: \"kubernetes.io/projected/877a6e11-3f6e-4e1f-acb8-4e7b7b522c16-kube-api-access-2gmgk\") pod \"crc-debug-84hcc\" (UID: \"877a6e11-3f6e-4e1f-acb8-4e7b7b522c16\") " pod="openshift-must-gather-rf2ld/crc-debug-84hcc" Oct 03 09:38:03 crc kubenswrapper[4899]: I1003 09:38:03.290772 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/877a6e11-3f6e-4e1f-acb8-4e7b7b522c16-host\") pod \"crc-debug-84hcc\" (UID: \"877a6e11-3f6e-4e1f-acb8-4e7b7b522c16\") " pod="openshift-must-gather-rf2ld/crc-debug-84hcc" Oct 03 09:38:03 crc kubenswrapper[4899]: I1003 09:38:03.393162 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gmgk\" (UniqueName: \"kubernetes.io/projected/877a6e11-3f6e-4e1f-acb8-4e7b7b522c16-kube-api-access-2gmgk\") pod \"crc-debug-84hcc\" (UID: \"877a6e11-3f6e-4e1f-acb8-4e7b7b522c16\") " pod="openshift-must-gather-rf2ld/crc-debug-84hcc" Oct 03 09:38:03 crc kubenswrapper[4899]: I1003 09:38:03.393229 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/877a6e11-3f6e-4e1f-acb8-4e7b7b522c16-host\") pod \"crc-debug-84hcc\" (UID: \"877a6e11-3f6e-4e1f-acb8-4e7b7b522c16\") " pod="openshift-must-gather-rf2ld/crc-debug-84hcc" Oct 03 09:38:03 crc kubenswrapper[4899]: I1003 09:38:03.393434 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/877a6e11-3f6e-4e1f-acb8-4e7b7b522c16-host\") pod \"crc-debug-84hcc\" (UID: \"877a6e11-3f6e-4e1f-acb8-4e7b7b522c16\") " pod="openshift-must-gather-rf2ld/crc-debug-84hcc" Oct 03 09:38:03 crc kubenswrapper[4899]: I1003 09:38:03.412764 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gmgk\" (UniqueName: \"kubernetes.io/projected/877a6e11-3f6e-4e1f-acb8-4e7b7b522c16-kube-api-access-2gmgk\") pod \"crc-debug-84hcc\" (UID: \"877a6e11-3f6e-4e1f-acb8-4e7b7b522c16\") " pod="openshift-must-gather-rf2ld/crc-debug-84hcc" Oct 03 09:38:03 crc kubenswrapper[4899]: I1003 09:38:03.481059 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rf2ld/crc-debug-84hcc" Oct 03 09:38:03 crc kubenswrapper[4899]: W1003 09:38:03.516431 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod877a6e11_3f6e_4e1f_acb8_4e7b7b522c16.slice/crio-0f3eff6bccdeaf02bffa510f4faf5bd009cc05e9d16e4734b77c4cd6c9169e8f WatchSource:0}: Error finding container 0f3eff6bccdeaf02bffa510f4faf5bd009cc05e9d16e4734b77c4cd6c9169e8f: Status 404 returned error can't find the container with id 0f3eff6bccdeaf02bffa510f4faf5bd009cc05e9d16e4734b77c4cd6c9169e8f Oct 03 09:38:04 crc kubenswrapper[4899]: I1003 09:38:04.426592 4899 generic.go:334] "Generic (PLEG): container finished" podID="877a6e11-3f6e-4e1f-acb8-4e7b7b522c16" containerID="725dece45798eaab15d66cb7db517fdf0a0fe918bc88963584acf4cdce2f09e0" exitCode=0 Oct 03 09:38:04 crc kubenswrapper[4899]: I1003 09:38:04.426650 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rf2ld/crc-debug-84hcc" event={"ID":"877a6e11-3f6e-4e1f-acb8-4e7b7b522c16","Type":"ContainerDied","Data":"725dece45798eaab15d66cb7db517fdf0a0fe918bc88963584acf4cdce2f09e0"} Oct 03 09:38:04 crc kubenswrapper[4899]: I1003 09:38:04.426690 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rf2ld/crc-debug-84hcc" event={"ID":"877a6e11-3f6e-4e1f-acb8-4e7b7b522c16","Type":"ContainerStarted","Data":"0f3eff6bccdeaf02bffa510f4faf5bd009cc05e9d16e4734b77c4cd6c9169e8f"} Oct 03 09:38:04 crc kubenswrapper[4899]: I1003 09:38:04.471635 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-rf2ld/crc-debug-84hcc"] Oct 03 09:38:04 crc kubenswrapper[4899]: I1003 09:38:04.482313 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-rf2ld/crc-debug-84hcc"] Oct 03 09:38:05 crc kubenswrapper[4899]: I1003 09:38:05.532936 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rf2ld/crc-debug-84hcc" Oct 03 09:38:05 crc kubenswrapper[4899]: I1003 09:38:05.533671 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2gmgk\" (UniqueName: \"kubernetes.io/projected/877a6e11-3f6e-4e1f-acb8-4e7b7b522c16-kube-api-access-2gmgk\") pod \"877a6e11-3f6e-4e1f-acb8-4e7b7b522c16\" (UID: \"877a6e11-3f6e-4e1f-acb8-4e7b7b522c16\") " Oct 03 09:38:05 crc kubenswrapper[4899]: I1003 09:38:05.533732 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/877a6e11-3f6e-4e1f-acb8-4e7b7b522c16-host\") pod \"877a6e11-3f6e-4e1f-acb8-4e7b7b522c16\" (UID: \"877a6e11-3f6e-4e1f-acb8-4e7b7b522c16\") " Oct 03 09:38:05 crc kubenswrapper[4899]: I1003 09:38:05.534185 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/877a6e11-3f6e-4e1f-acb8-4e7b7b522c16-host" (OuterVolumeSpecName: "host") pod "877a6e11-3f6e-4e1f-acb8-4e7b7b522c16" (UID: "877a6e11-3f6e-4e1f-acb8-4e7b7b522c16"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 09:38:05 crc kubenswrapper[4899]: I1003 09:38:05.539929 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/877a6e11-3f6e-4e1f-acb8-4e7b7b522c16-kube-api-access-2gmgk" (OuterVolumeSpecName: "kube-api-access-2gmgk") pod "877a6e11-3f6e-4e1f-acb8-4e7b7b522c16" (UID: "877a6e11-3f6e-4e1f-acb8-4e7b7b522c16"). 
InnerVolumeSpecName "kube-api-access-2gmgk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:38:05 crc kubenswrapper[4899]: I1003 09:38:05.636036 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2gmgk\" (UniqueName: \"kubernetes.io/projected/877a6e11-3f6e-4e1f-acb8-4e7b7b522c16-kube-api-access-2gmgk\") on node \"crc\" DevicePath \"\"" Oct 03 09:38:05 crc kubenswrapper[4899]: I1003 09:38:05.636288 4899 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/877a6e11-3f6e-4e1f-acb8-4e7b7b522c16-host\") on node \"crc\" DevicePath \"\"" Oct 03 09:38:05 crc kubenswrapper[4899]: I1003 09:38:05.949692 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d_f5e06749-553d-438a-b1be-4db08df71d67/util/0.log" Oct 03 09:38:06 crc kubenswrapper[4899]: I1003 09:38:06.171255 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d_f5e06749-553d-438a-b1be-4db08df71d67/util/0.log" Oct 03 09:38:06 crc kubenswrapper[4899]: I1003 09:38:06.192478 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d_f5e06749-553d-438a-b1be-4db08df71d67/pull/0.log" Oct 03 09:38:06 crc kubenswrapper[4899]: I1003 09:38:06.233508 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d_f5e06749-553d-438a-b1be-4db08df71d67/pull/0.log" Oct 03 09:38:06 crc kubenswrapper[4899]: I1003 09:38:06.348559 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d_f5e06749-553d-438a-b1be-4db08df71d67/util/0.log" Oct 03 09:38:06 crc kubenswrapper[4899]: I1003 09:38:06.374311 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d_f5e06749-553d-438a-b1be-4db08df71d67/extract/0.log" Oct 03 09:38:06 crc kubenswrapper[4899]: I1003 09:38:06.416183 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d_f5e06749-553d-438a-b1be-4db08df71d67/pull/0.log" Oct 03 09:38:06 crc kubenswrapper[4899]: I1003 09:38:06.446511 4899 scope.go:117] "RemoveContainer" containerID="725dece45798eaab15d66cb7db517fdf0a0fe918bc88963584acf4cdce2f09e0" Oct 03 09:38:06 crc kubenswrapper[4899]: I1003 09:38:06.446703 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rf2ld/crc-debug-84hcc" Oct 03 09:38:06 crc kubenswrapper[4899]: I1003 09:38:06.541576 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="877a6e11-3f6e-4e1f-acb8-4e7b7b522c16" path="/var/lib/kubelet/pods/877a6e11-3f6e-4e1f-acb8-4e7b7b522c16/volumes" Oct 03 09:38:06 crc kubenswrapper[4899]: I1003 09:38:06.546173 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6c675fb79f-hz7qr_c90f297d-af70-423f-b34d-8b3599ba12eb/kube-rbac-proxy/0.log" Oct 03 09:38:06 crc kubenswrapper[4899]: I1003 09:38:06.622198 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6c675fb79f-hz7qr_c90f297d-af70-423f-b34d-8b3599ba12eb/manager/0.log" Oct 03 09:38:06 crc kubenswrapper[4899]: I1003 09:38:06.659045 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-79d68d6c85-bj2gj_3e876467-fd1a-4b4c-b62b-d1641400a756/kube-rbac-proxy/0.log" Oct 03 09:38:06 crc kubenswrapper[4899]: I1003 09:38:06.783340 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-79d68d6c85-bj2gj_3e876467-fd1a-4b4c-b62b-d1641400a756/manager/0.log" Oct 03 09:38:06 crc kubenswrapper[4899]: I1003 09:38:06.868308 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-75dfd9b554-jqgrz_ba75f5b9-b92b-4cd7-98c9-1bcf6b772940/manager/0.log" Oct 03 09:38:06 crc kubenswrapper[4899]: I1003 09:38:06.877991 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-75dfd9b554-jqgrz_ba75f5b9-b92b-4cd7-98c9-1bcf6b772940/kube-rbac-proxy/0.log" Oct 03 09:38:07 crc kubenswrapper[4899]: I1003 09:38:07.055157 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-846dff85b5-mhkqv_b32f9b3e-72a8-4229-9715-8fdd98877a04/kube-rbac-proxy/0.log" Oct 03 09:38:07 crc kubenswrapper[4899]: I1003 09:38:07.116349 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-846dff85b5-mhkqv_b32f9b3e-72a8-4229-9715-8fdd98877a04/manager/0.log" Oct 03 09:38:07 crc kubenswrapper[4899]: I1003 09:38:07.233279 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-599898f689-5c9bs_b14b68d1-483a-419c-b696-a915c6d25d09/manager/0.log" Oct 03 09:38:07 crc kubenswrapper[4899]: I1003 09:38:07.294354 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-599898f689-5c9bs_b14b68d1-483a-419c-b696-a915c6d25d09/kube-rbac-proxy/0.log" Oct 03 09:38:07 crc kubenswrapper[4899]: I1003 09:38:07.327679 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6769b867d9-k8snv_a8895f13-915f-45f7-8156-43a7f11ac9bb/kube-rbac-proxy/0.log" Oct 03 09:38:07 crc kubenswrapper[4899]: I1003 09:38:07.440592 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6769b867d9-k8snv_a8895f13-915f-45f7-8156-43a7f11ac9bb/manager/0.log" Oct 03 09:38:07 crc kubenswrapper[4899]: I1003 09:38:07.497997 4899 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_infra-operator-controller-manager-5fbf469cd7-9qcj2_2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919/kube-rbac-proxy/0.log" Oct 03 09:38:07 crc kubenswrapper[4899]: I1003 09:38:07.670496 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-84bc9db6cc-hpqjs_39eb57f7-d61f-4445-aea3-6b96585c4f76/kube-rbac-proxy/0.log" Oct 03 09:38:07 crc kubenswrapper[4899]: I1003 09:38:07.673205 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-5fbf469cd7-9qcj2_2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919/manager/0.log" Oct 03 09:38:07 crc kubenswrapper[4899]: I1003 09:38:07.725170 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-84bc9db6cc-hpqjs_39eb57f7-d61f-4445-aea3-6b96585c4f76/manager/0.log" Oct 03 09:38:07 crc kubenswrapper[4899]: I1003 09:38:07.882411 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7f55849f88-qxxgx_d8d28854-8e4e-47cd-847a-c58811fb4f91/kube-rbac-proxy/0.log" Oct 03 09:38:07 crc kubenswrapper[4899]: I1003 09:38:07.976554 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7f55849f88-qxxgx_d8d28854-8e4e-47cd-847a-c58811fb4f91/manager/0.log" Oct 03 09:38:08 crc kubenswrapper[4899]: I1003 09:38:08.102947 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6fd6854b49-hwhx2_ed44541c-bb31-43bb-92eb-298b01820505/kube-rbac-proxy/0.log" Oct 03 09:38:08 crc kubenswrapper[4899]: I1003 09:38:08.120724 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6fd6854b49-hwhx2_ed44541c-bb31-43bb-92eb-298b01820505/manager/0.log" Oct 03 09:38:08 crc kubenswrapper[4899]: I1003 09:38:08.191958 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5c468bf4d4-trrmd_110672ad-3117-4a7c-8614-f12ab626e28c/kube-rbac-proxy/0.log" Oct 03 09:38:08 crc kubenswrapper[4899]: I1003 09:38:08.303870 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5c468bf4d4-trrmd_110672ad-3117-4a7c-8614-f12ab626e28c/manager/0.log" Oct 03 09:38:08 crc kubenswrapper[4899]: I1003 09:38:08.364770 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-6574bf987d-dglwx_f500aadc-0447-4d26-9ab4-83f64b084a89/kube-rbac-proxy/0.log" Oct 03 09:38:08 crc kubenswrapper[4899]: I1003 09:38:08.449223 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-6574bf987d-dglwx_f500aadc-0447-4d26-9ab4-83f64b084a89/manager/0.log" Oct 03 09:38:08 crc kubenswrapper[4899]: I1003 09:38:08.535630 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-555c7456bd-78vk7_20ecf87a-f08f-4d2f-92fd-ba14a9a9e5b2/kube-rbac-proxy/0.log" Oct 03 09:38:08 crc kubenswrapper[4899]: I1003 09:38:08.652750 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-555c7456bd-78vk7_20ecf87a-f08f-4d2f-92fd-ba14a9a9e5b2/manager/0.log" Oct 03 09:38:08 crc kubenswrapper[4899]: I1003 09:38:08.775877 4899 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-59d6cfdf45-7zf22_de3ec379-fb48-440a-8502-3650db78804a/manager/0.log" Oct 03 09:38:08 crc kubenswrapper[4899]: I1003 09:38:08.780929 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-59d6cfdf45-7zf22_de3ec379-fb48-440a-8502-3650db78804a/kube-rbac-proxy/0.log" Oct 03 09:38:08 crc kubenswrapper[4899]: I1003 09:38:08.958156 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg_62eda81d-d797-4ed4-9687-9cdc7c49decb/kube-rbac-proxy/0.log" Oct 03 09:38:08 crc kubenswrapper[4899]: I1003 09:38:08.967542 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg_62eda81d-d797-4ed4-9687-9cdc7c49decb/manager/0.log" Oct 03 09:38:09 crc kubenswrapper[4899]: I1003 09:38:09.135232 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7cfd4b6679-jn88c_2bb995f5-5432-40fc-a196-71cac18de666/kube-rbac-proxy/0.log" Oct 03 09:38:09 crc kubenswrapper[4899]: I1003 09:38:09.317704 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-669c8666b5-gt89m_99f6fa80-7e18-4fa5-8421-8ff2e51fbdbc/kube-rbac-proxy/0.log" Oct 03 09:38:09 crc kubenswrapper[4899]: I1003 09:38:09.512004 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-cq96k_3e217d24-f3ae-48f2-87bb-d9b735659f5d/registry-server/0.log" Oct 03 09:38:09 crc kubenswrapper[4899]: I1003 09:38:09.522867 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-669c8666b5-gt89m_99f6fa80-7e18-4fa5-8421-8ff2e51fbdbc/operator/0.log" Oct 03 09:38:09 crc kubenswrapper[4899]: I1003 09:38:09.756013 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-688db7b6c7-7psz6_2cbb69db-51ad-471c-be3a-57b9422f11cd/kube-rbac-proxy/0.log" Oct 03 09:38:09 crc kubenswrapper[4899]: I1003 09:38:09.862953 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-688db7b6c7-7psz6_2cbb69db-51ad-471c-be3a-57b9422f11cd/manager/0.log" Oct 03 09:38:09 crc kubenswrapper[4899]: I1003 09:38:09.940612 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-7d8bb7f44c-bnb75_6008780d-5be3-4fda-8526-594566364ae4/kube-rbac-proxy/0.log" Oct 03 09:38:10 crc kubenswrapper[4899]: I1003 09:38:10.008838 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-7d8bb7f44c-bnb75_6008780d-5be3-4fda-8526-594566364ae4/manager/0.log" Oct 03 09:38:10 crc kubenswrapper[4899]: I1003 09:38:10.214657 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-n8q4j_5c4913b3-fd20-4eee-99df-1900f5486f51/operator/0.log" Oct 03 09:38:10 crc kubenswrapper[4899]: I1003 09:38:10.262885 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6859f9b676-f5mgb_837b07a9-5832-4d01-b257-ac3fca82b121/kube-rbac-proxy/0.log" Oct 03 09:38:10 crc kubenswrapper[4899]: I1003 
09:38:10.334117 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6859f9b676-f5mgb_837b07a9-5832-4d01-b257-ac3fca82b121/manager/0.log" Oct 03 09:38:10 crc kubenswrapper[4899]: I1003 09:38:10.410709 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7cfd4b6679-jn88c_2bb995f5-5432-40fc-a196-71cac18de666/manager/0.log" Oct 03 09:38:10 crc kubenswrapper[4899]: I1003 09:38:10.473455 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5db5cf686f-zvwbw_6c6cb9a6-eacf-411e-8c19-ac8ee51eced8/kube-rbac-proxy/0.log" Oct 03 09:38:10 crc kubenswrapper[4899]: I1003 09:38:10.536349 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5db5cf686f-zvwbw_6c6cb9a6-eacf-411e-8c19-ac8ee51eced8/manager/0.log" Oct 03 09:38:10 crc kubenswrapper[4899]: I1003 09:38:10.594539 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cd5cb47d7-lpwr4_03abda9b-2057-42c8-8161-4104ecb96027/manager/0.log" Oct 03 09:38:10 crc kubenswrapper[4899]: I1003 09:38:10.624917 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cd5cb47d7-lpwr4_03abda9b-2057-42c8-8161-4104ecb96027/kube-rbac-proxy/0.log" Oct 03 09:38:10 crc kubenswrapper[4899]: I1003 09:38:10.744622 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-fcd7d9895-lxmmw_7496bf16-1fc1-44ec-b96b-e75e00652634/kube-rbac-proxy/0.log" Oct 03 09:38:10 crc kubenswrapper[4899]: I1003 09:38:10.745924 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-fcd7d9895-lxmmw_7496bf16-1fc1-44ec-b96b-e75e00652634/manager/0.log" Oct 03 09:38:24 crc kubenswrapper[4899]: I1003 09:38:24.699921 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-f8jvt_ae242b31-ad12-4328-8818-313458ed46aa/control-plane-machine-set-operator/0.log" Oct 03 09:38:24 crc kubenswrapper[4899]: I1003 09:38:24.867573 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-lw5xr_a8b9468d-675b-42d9-b5e8-b45f5d35deef/kube-rbac-proxy/0.log" Oct 03 09:38:24 crc kubenswrapper[4899]: I1003 09:38:24.924007 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-lw5xr_a8b9468d-675b-42d9-b5e8-b45f5d35deef/machine-api-operator/0.log" Oct 03 09:38:35 crc kubenswrapper[4899]: I1003 09:38:35.613442 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-r2625_9892f98e-dee3-42ea-88c5-2a17dc19988d/cert-manager-controller/0.log" Oct 03 09:38:35 crc kubenswrapper[4899]: I1003 09:38:35.747348 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-prrpr_00a2b8ef-3f94-4a34-8692-6fd9fa800cd9/cert-manager-cainjector/0.log" Oct 03 09:38:35 crc kubenswrapper[4899]: I1003 09:38:35.799571 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-jjw6p_355746c7-f59d-41d1-9cbe-c3668e16d478/cert-manager-webhook/0.log" Oct 03 09:38:46 crc kubenswrapper[4899]: I1003 
09:38:46.348911 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-9lk8l_932306e1-0688-47a3-af53-642db1b63eb0/nmstate-console-plugin/0.log" Oct 03 09:38:46 crc kubenswrapper[4899]: I1003 09:38:46.448358 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-wggc2_3bcb7a6a-9902-4fea-a7d2-c7a508d7f695/nmstate-handler/0.log" Oct 03 09:38:46 crc kubenswrapper[4899]: I1003 09:38:46.513086 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-jqv6w_e97cb3b7-2cf2-4021-b189-0e4c79b60f9a/kube-rbac-proxy/0.log" Oct 03 09:38:46 crc kubenswrapper[4899]: I1003 09:38:46.550852 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-jqv6w_e97cb3b7-2cf2-4021-b189-0e4c79b60f9a/nmstate-metrics/0.log" Oct 03 09:38:46 crc kubenswrapper[4899]: I1003 09:38:46.756191 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-kf8v5_65081539-f48e-404c-96a9-c1f8035404ed/nmstate-operator/0.log" Oct 03 09:38:46 crc kubenswrapper[4899]: I1003 09:38:46.761328 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-65rlw_cfd8a398-f0f8-47ed-9f92-49edea78e66b/nmstate-webhook/0.log" Oct 03 09:38:59 crc kubenswrapper[4899]: I1003 09:38:59.345102 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-mgk7c_8ec8f47a-3de7-4e04-a612-dbf72a0a21d5/kube-rbac-proxy/0.log" Oct 03 09:38:59 crc kubenswrapper[4899]: I1003 09:38:59.444686 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-mgk7c_8ec8f47a-3de7-4e04-a612-dbf72a0a21d5/controller/0.log" Oct 03 09:38:59 crc kubenswrapper[4899]: I1003 09:38:59.576615 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/cp-frr-files/0.log" Oct 03 09:38:59 crc kubenswrapper[4899]: I1003 09:38:59.748600 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/cp-frr-files/0.log" Oct 03 09:38:59 crc kubenswrapper[4899]: I1003 09:38:59.780466 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/cp-metrics/0.log" Oct 03 09:38:59 crc kubenswrapper[4899]: I1003 09:38:59.794789 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/cp-reloader/0.log" Oct 03 09:38:59 crc kubenswrapper[4899]: I1003 09:38:59.795231 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/cp-reloader/0.log" Oct 03 09:38:59 crc kubenswrapper[4899]: I1003 09:38:59.959843 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/cp-frr-files/0.log" Oct 03 09:38:59 crc kubenswrapper[4899]: I1003 09:38:59.999112 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/cp-metrics/0.log" Oct 03 09:39:00 crc kubenswrapper[4899]: I1003 09:39:00.016703 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/cp-metrics/0.log" Oct 03 09:39:00 crc 
kubenswrapper[4899]: I1003 09:39:00.022347 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/cp-reloader/0.log" Oct 03 09:39:00 crc kubenswrapper[4899]: I1003 09:39:00.134571 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/cp-frr-files/0.log" Oct 03 09:39:00 crc kubenswrapper[4899]: I1003 09:39:00.178255 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/cp-metrics/0.log" Oct 03 09:39:00 crc kubenswrapper[4899]: I1003 09:39:00.179178 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/cp-reloader/0.log" Oct 03 09:39:00 crc kubenswrapper[4899]: I1003 09:39:00.240169 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/controller/0.log" Oct 03 09:39:00 crc kubenswrapper[4899]: I1003 09:39:00.370967 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/frr-metrics/0.log" Oct 03 09:39:00 crc kubenswrapper[4899]: I1003 09:39:00.377316 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/kube-rbac-proxy/0.log" Oct 03 09:39:00 crc kubenswrapper[4899]: I1003 09:39:00.497321 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/kube-rbac-proxy-frr/0.log" Oct 03 09:39:00 crc kubenswrapper[4899]: I1003 09:39:00.579381 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/reloader/0.log" Oct 03 09:39:00 crc kubenswrapper[4899]: I1003 09:39:00.725305 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-64bf5d555-6t985_d43f846f-0e5f-4bb8-9041-6b471ca7e6df/frr-k8s-webhook-server/0.log" Oct 03 09:39:00 crc kubenswrapper[4899]: I1003 09:39:00.883344 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-7dd48c8965-pvfqh_8643a4de-b352-4699-8054-7d4e4f97a946/manager/0.log" Oct 03 09:39:01 crc kubenswrapper[4899]: I1003 09:39:01.039405 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-798f6d5f9c-6p988_cd92ea76-f379-4b3d-aac6-2143d789e086/webhook-server/0.log" Oct 03 09:39:01 crc kubenswrapper[4899]: I1003 09:39:01.162139 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-9jkt6_94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6/kube-rbac-proxy/0.log" Oct 03 09:39:01 crc kubenswrapper[4899]: I1003 09:39:01.729236 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-9jkt6_94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6/speaker/0.log" Oct 03 09:39:01 crc kubenswrapper[4899]: I1003 09:39:01.889996 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/frr/0.log" Oct 03 09:39:11 crc kubenswrapper[4899]: I1003 09:39:11.775917 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-r6m6r"] Oct 03 09:39:11 crc kubenswrapper[4899]: E1003 09:39:11.776778 4899 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="877a6e11-3f6e-4e1f-acb8-4e7b7b522c16" containerName="container-00" Oct 03 09:39:11 crc kubenswrapper[4899]: I1003 09:39:11.776792 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="877a6e11-3f6e-4e1f-acb8-4e7b7b522c16" containerName="container-00" Oct 03 09:39:11 crc kubenswrapper[4899]: I1003 09:39:11.777054 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="877a6e11-3f6e-4e1f-acb8-4e7b7b522c16" containerName="container-00" Oct 03 09:39:11 crc kubenswrapper[4899]: I1003 09:39:11.781423 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r6m6r" Oct 03 09:39:11 crc kubenswrapper[4899]: I1003 09:39:11.791329 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-r6m6r"] Oct 03 09:39:11 crc kubenswrapper[4899]: I1003 09:39:11.890638 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48c571bf-c1f1-4454-b47b-fe5f9a858ce8-utilities\") pod \"certified-operators-r6m6r\" (UID: \"48c571bf-c1f1-4454-b47b-fe5f9a858ce8\") " pod="openshift-marketplace/certified-operators-r6m6r" Oct 03 09:39:11 crc kubenswrapper[4899]: I1003 09:39:11.890791 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48c571bf-c1f1-4454-b47b-fe5f9a858ce8-catalog-content\") pod \"certified-operators-r6m6r\" (UID: \"48c571bf-c1f1-4454-b47b-fe5f9a858ce8\") " pod="openshift-marketplace/certified-operators-r6m6r" Oct 03 09:39:11 crc kubenswrapper[4899]: I1003 09:39:11.890837 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxthr\" (UniqueName: \"kubernetes.io/projected/48c571bf-c1f1-4454-b47b-fe5f9a858ce8-kube-api-access-bxthr\") pod \"certified-operators-r6m6r\" (UID: \"48c571bf-c1f1-4454-b47b-fe5f9a858ce8\") " pod="openshift-marketplace/certified-operators-r6m6r" Oct 03 09:39:11 crc kubenswrapper[4899]: I1003 09:39:11.993275 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48c571bf-c1f1-4454-b47b-fe5f9a858ce8-catalog-content\") pod \"certified-operators-r6m6r\" (UID: \"48c571bf-c1f1-4454-b47b-fe5f9a858ce8\") " pod="openshift-marketplace/certified-operators-r6m6r" Oct 03 09:39:11 crc kubenswrapper[4899]: I1003 09:39:11.993365 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxthr\" (UniqueName: \"kubernetes.io/projected/48c571bf-c1f1-4454-b47b-fe5f9a858ce8-kube-api-access-bxthr\") pod \"certified-operators-r6m6r\" (UID: \"48c571bf-c1f1-4454-b47b-fe5f9a858ce8\") " pod="openshift-marketplace/certified-operators-r6m6r" Oct 03 09:39:11 crc kubenswrapper[4899]: I1003 09:39:11.993428 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48c571bf-c1f1-4454-b47b-fe5f9a858ce8-utilities\") pod \"certified-operators-r6m6r\" (UID: \"48c571bf-c1f1-4454-b47b-fe5f9a858ce8\") " pod="openshift-marketplace/certified-operators-r6m6r" Oct 03 09:39:11 crc kubenswrapper[4899]: I1003 09:39:11.993841 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48c571bf-c1f1-4454-b47b-fe5f9a858ce8-catalog-content\") pod 
\"certified-operators-r6m6r\" (UID: \"48c571bf-c1f1-4454-b47b-fe5f9a858ce8\") " pod="openshift-marketplace/certified-operators-r6m6r" Oct 03 09:39:11 crc kubenswrapper[4899]: I1003 09:39:11.994033 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48c571bf-c1f1-4454-b47b-fe5f9a858ce8-utilities\") pod \"certified-operators-r6m6r\" (UID: \"48c571bf-c1f1-4454-b47b-fe5f9a858ce8\") " pod="openshift-marketplace/certified-operators-r6m6r" Oct 03 09:39:12 crc kubenswrapper[4899]: I1003 09:39:12.021856 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bxthr\" (UniqueName: \"kubernetes.io/projected/48c571bf-c1f1-4454-b47b-fe5f9a858ce8-kube-api-access-bxthr\") pod \"certified-operators-r6m6r\" (UID: \"48c571bf-c1f1-4454-b47b-fe5f9a858ce8\") " pod="openshift-marketplace/certified-operators-r6m6r" Oct 03 09:39:12 crc kubenswrapper[4899]: I1003 09:39:12.110935 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r6m6r" Oct 03 09:39:12 crc kubenswrapper[4899]: I1003 09:39:12.199274 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 09:39:12 crc kubenswrapper[4899]: I1003 09:39:12.199360 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 09:39:12 crc kubenswrapper[4899]: I1003 09:39:12.623981 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-r6m6r"] Oct 03 09:39:13 crc kubenswrapper[4899]: I1003 09:39:13.024922 4899 generic.go:334] "Generic (PLEG): container finished" podID="48c571bf-c1f1-4454-b47b-fe5f9a858ce8" containerID="7c148674106fda11a412cbc783e8bead455849d691dabc92352ff9dc500df2e4" exitCode=0 Oct 03 09:39:13 crc kubenswrapper[4899]: I1003 09:39:13.025092 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r6m6r" event={"ID":"48c571bf-c1f1-4454-b47b-fe5f9a858ce8","Type":"ContainerDied","Data":"7c148674106fda11a412cbc783e8bead455849d691dabc92352ff9dc500df2e4"} Oct 03 09:39:13 crc kubenswrapper[4899]: I1003 09:39:13.025155 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r6m6r" event={"ID":"48c571bf-c1f1-4454-b47b-fe5f9a858ce8","Type":"ContainerStarted","Data":"9da43e72a8c2be5d71333840a0b7a77bc5d5f4c68b8e129db9f51ebec711a897"} Oct 03 09:39:13 crc kubenswrapper[4899]: I1003 09:39:13.120152 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb_ab7bdf03-1685-4b51-b1a5-db0c9c4aa575/util/0.log" Oct 03 09:39:13 crc kubenswrapper[4899]: I1003 09:39:13.296374 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb_ab7bdf03-1685-4b51-b1a5-db0c9c4aa575/util/0.log" Oct 03 09:39:13 crc kubenswrapper[4899]: I1003 09:39:13.316181 4899 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb_ab7bdf03-1685-4b51-b1a5-db0c9c4aa575/pull/0.log" Oct 03 09:39:13 crc kubenswrapper[4899]: I1003 09:39:13.325436 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb_ab7bdf03-1685-4b51-b1a5-db0c9c4aa575/pull/0.log" Oct 03 09:39:13 crc kubenswrapper[4899]: I1003 09:39:13.470606 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb_ab7bdf03-1685-4b51-b1a5-db0c9c4aa575/pull/0.log" Oct 03 09:39:13 crc kubenswrapper[4899]: I1003 09:39:13.496467 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb_ab7bdf03-1685-4b51-b1a5-db0c9c4aa575/util/0.log" Oct 03 09:39:13 crc kubenswrapper[4899]: I1003 09:39:13.513929 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb_ab7bdf03-1685-4b51-b1a5-db0c9c4aa575/extract/0.log" Oct 03 09:39:13 crc kubenswrapper[4899]: I1003 09:39:13.643504 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2xq2p_3e316fba-9f8d-4fb6-9adf-0c8842bdf476/extract-utilities/0.log" Oct 03 09:39:13 crc kubenswrapper[4899]: I1003 09:39:13.830087 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2xq2p_3e316fba-9f8d-4fb6-9adf-0c8842bdf476/extract-content/0.log" Oct 03 09:39:13 crc kubenswrapper[4899]: I1003 09:39:13.839949 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2xq2p_3e316fba-9f8d-4fb6-9adf-0c8842bdf476/extract-utilities/0.log" Oct 03 09:39:13 crc kubenswrapper[4899]: I1003 09:39:13.857102 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2xq2p_3e316fba-9f8d-4fb6-9adf-0c8842bdf476/extract-content/0.log" Oct 03 09:39:14 crc kubenswrapper[4899]: I1003 09:39:14.056124 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r6m6r" event={"ID":"48c571bf-c1f1-4454-b47b-fe5f9a858ce8","Type":"ContainerStarted","Data":"c227ba7624ebcbf3c8f2193c02db99bb500a145a6f07dcefc301ed9762068c86"} Oct 03 09:39:14 crc kubenswrapper[4899]: I1003 09:39:14.066843 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2xq2p_3e316fba-9f8d-4fb6-9adf-0c8842bdf476/extract-content/0.log" Oct 03 09:39:14 crc kubenswrapper[4899]: I1003 09:39:14.071639 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2xq2p_3e316fba-9f8d-4fb6-9adf-0c8842bdf476/extract-utilities/0.log" Oct 03 09:39:14 crc kubenswrapper[4899]: I1003 09:39:14.277384 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-r6m6r_48c571bf-c1f1-4454-b47b-fe5f9a858ce8/extract-utilities/0.log" Oct 03 09:39:14 crc kubenswrapper[4899]: I1003 09:39:14.510266 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-r6m6r_48c571bf-c1f1-4454-b47b-fe5f9a858ce8/extract-utilities/0.log" Oct 03 09:39:14 crc kubenswrapper[4899]: I1003 09:39:14.589403 4899 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-r6m6r_48c571bf-c1f1-4454-b47b-fe5f9a858ce8/extract-content/0.log" Oct 03 09:39:14 crc kubenswrapper[4899]: I1003 09:39:14.605086 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-r6m6r_48c571bf-c1f1-4454-b47b-fe5f9a858ce8/extract-content/0.log" Oct 03 09:39:14 crc kubenswrapper[4899]: I1003 09:39:14.620587 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2xq2p_3e316fba-9f8d-4fb6-9adf-0c8842bdf476/registry-server/0.log" Oct 03 09:39:14 crc kubenswrapper[4899]: I1003 09:39:14.753495 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-r6m6r_48c571bf-c1f1-4454-b47b-fe5f9a858ce8/extract-utilities/0.log" Oct 03 09:39:14 crc kubenswrapper[4899]: I1003 09:39:14.776152 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-r6m6r_48c571bf-c1f1-4454-b47b-fe5f9a858ce8/extract-content/0.log" Oct 03 09:39:14 crc kubenswrapper[4899]: I1003 09:39:14.894179 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xw5wc_7acc767f-da7a-4e39-90e2-f504c5b40827/extract-utilities/0.log" Oct 03 09:39:14 crc kubenswrapper[4899]: I1003 09:39:14.974125 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zlrl8"] Oct 03 09:39:14 crc kubenswrapper[4899]: I1003 09:39:14.976225 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zlrl8" Oct 03 09:39:14 crc kubenswrapper[4899]: I1003 09:39:14.983335 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zlrl8"] Oct 03 09:39:15 crc kubenswrapper[4899]: I1003 09:39:15.066380 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e41b7bf-1c29-4178-86f8-baed90d93665-catalog-content\") pod \"community-operators-zlrl8\" (UID: \"2e41b7bf-1c29-4178-86f8-baed90d93665\") " pod="openshift-marketplace/community-operators-zlrl8" Oct 03 09:39:15 crc kubenswrapper[4899]: I1003 09:39:15.066530 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b79rg\" (UniqueName: \"kubernetes.io/projected/2e41b7bf-1c29-4178-86f8-baed90d93665-kube-api-access-b79rg\") pod \"community-operators-zlrl8\" (UID: \"2e41b7bf-1c29-4178-86f8-baed90d93665\") " pod="openshift-marketplace/community-operators-zlrl8" Oct 03 09:39:15 crc kubenswrapper[4899]: I1003 09:39:15.066560 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e41b7bf-1c29-4178-86f8-baed90d93665-utilities\") pod \"community-operators-zlrl8\" (UID: \"2e41b7bf-1c29-4178-86f8-baed90d93665\") " pod="openshift-marketplace/community-operators-zlrl8" Oct 03 09:39:15 crc kubenswrapper[4899]: I1003 09:39:15.069026 4899 generic.go:334] "Generic (PLEG): container finished" podID="48c571bf-c1f1-4454-b47b-fe5f9a858ce8" containerID="c227ba7624ebcbf3c8f2193c02db99bb500a145a6f07dcefc301ed9762068c86" exitCode=0 Oct 03 09:39:15 crc kubenswrapper[4899]: I1003 09:39:15.069069 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r6m6r" 
event={"ID":"48c571bf-c1f1-4454-b47b-fe5f9a858ce8","Type":"ContainerDied","Data":"c227ba7624ebcbf3c8f2193c02db99bb500a145a6f07dcefc301ed9762068c86"} Oct 03 09:39:15 crc kubenswrapper[4899]: I1003 09:39:15.168993 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b79rg\" (UniqueName: \"kubernetes.io/projected/2e41b7bf-1c29-4178-86f8-baed90d93665-kube-api-access-b79rg\") pod \"community-operators-zlrl8\" (UID: \"2e41b7bf-1c29-4178-86f8-baed90d93665\") " pod="openshift-marketplace/community-operators-zlrl8" Oct 03 09:39:15 crc kubenswrapper[4899]: I1003 09:39:15.169285 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e41b7bf-1c29-4178-86f8-baed90d93665-utilities\") pod \"community-operators-zlrl8\" (UID: \"2e41b7bf-1c29-4178-86f8-baed90d93665\") " pod="openshift-marketplace/community-operators-zlrl8" Oct 03 09:39:15 crc kubenswrapper[4899]: I1003 09:39:15.169471 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e41b7bf-1c29-4178-86f8-baed90d93665-catalog-content\") pod \"community-operators-zlrl8\" (UID: \"2e41b7bf-1c29-4178-86f8-baed90d93665\") " pod="openshift-marketplace/community-operators-zlrl8" Oct 03 09:39:15 crc kubenswrapper[4899]: I1003 09:39:15.169872 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e41b7bf-1c29-4178-86f8-baed90d93665-utilities\") pod \"community-operators-zlrl8\" (UID: \"2e41b7bf-1c29-4178-86f8-baed90d93665\") " pod="openshift-marketplace/community-operators-zlrl8" Oct 03 09:39:15 crc kubenswrapper[4899]: I1003 09:39:15.169936 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e41b7bf-1c29-4178-86f8-baed90d93665-catalog-content\") pod \"community-operators-zlrl8\" (UID: \"2e41b7bf-1c29-4178-86f8-baed90d93665\") " pod="openshift-marketplace/community-operators-zlrl8" Oct 03 09:39:15 crc kubenswrapper[4899]: I1003 09:39:15.191606 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b79rg\" (UniqueName: \"kubernetes.io/projected/2e41b7bf-1c29-4178-86f8-baed90d93665-kube-api-access-b79rg\") pod \"community-operators-zlrl8\" (UID: \"2e41b7bf-1c29-4178-86f8-baed90d93665\") " pod="openshift-marketplace/community-operators-zlrl8" Oct 03 09:39:15 crc kubenswrapper[4899]: I1003 09:39:15.201163 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xw5wc_7acc767f-da7a-4e39-90e2-f504c5b40827/extract-content/0.log" Oct 03 09:39:15 crc kubenswrapper[4899]: I1003 09:39:15.301696 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zlrl8" Oct 03 09:39:15 crc kubenswrapper[4899]: I1003 09:39:15.361613 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xw5wc_7acc767f-da7a-4e39-90e2-f504c5b40827/extract-utilities/0.log" Oct 03 09:39:15 crc kubenswrapper[4899]: I1003 09:39:15.366142 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xw5wc_7acc767f-da7a-4e39-90e2-f504c5b40827/extract-content/0.log" Oct 03 09:39:15 crc kubenswrapper[4899]: I1003 09:39:15.596291 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xw5wc_7acc767f-da7a-4e39-90e2-f504c5b40827/extract-utilities/0.log" Oct 03 09:39:15 crc kubenswrapper[4899]: I1003 09:39:15.694207 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xw5wc_7acc767f-da7a-4e39-90e2-f504c5b40827/extract-content/0.log" Oct 03 09:39:15 crc kubenswrapper[4899]: I1003 09:39:15.879814 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45_c4f4d920-9e4c-4828-89dd-4e95975d5ec8/util/0.log" Oct 03 09:39:15 crc kubenswrapper[4899]: I1003 09:39:15.943552 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zlrl8"] Oct 03 09:39:15 crc kubenswrapper[4899]: W1003 09:39:15.946867 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2e41b7bf_1c29_4178_86f8_baed90d93665.slice/crio-f2ee029d4b97aa383bc47859690a308181935ea97c527318aead2b2242229381 WatchSource:0}: Error finding container f2ee029d4b97aa383bc47859690a308181935ea97c527318aead2b2242229381: Status 404 returned error can't find the container with id f2ee029d4b97aa383bc47859690a308181935ea97c527318aead2b2242229381 Oct 03 09:39:16 crc kubenswrapper[4899]: I1003 09:39:16.097314 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r6m6r" event={"ID":"48c571bf-c1f1-4454-b47b-fe5f9a858ce8","Type":"ContainerStarted","Data":"7f09d7ee9333e00d280ac51816f9e6fce14ad6c8ad728ab492e45f64152ee39e"} Oct 03 09:39:16 crc kubenswrapper[4899]: I1003 09:39:16.101175 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zlrl8" event={"ID":"2e41b7bf-1c29-4178-86f8-baed90d93665","Type":"ContainerStarted","Data":"f2ee029d4b97aa383bc47859690a308181935ea97c527318aead2b2242229381"} Oct 03 09:39:16 crc kubenswrapper[4899]: I1003 09:39:16.112820 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xw5wc_7acc767f-da7a-4e39-90e2-f504c5b40827/registry-server/0.log" Oct 03 09:39:16 crc kubenswrapper[4899]: I1003 09:39:16.119411 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-r6m6r" podStartSLOduration=2.467297261 podStartE2EDuration="5.119393701s" podCreationTimestamp="2025-10-03 09:39:11 +0000 UTC" firstStartedPulling="2025-10-03 09:39:13.026892348 +0000 UTC m=+3527.134377301" lastFinishedPulling="2025-10-03 09:39:15.678988788 +0000 UTC m=+3529.786473741" observedRunningTime="2025-10-03 09:39:16.114031202 +0000 UTC m=+3530.221516155" watchObservedRunningTime="2025-10-03 09:39:16.119393701 +0000 UTC m=+3530.226878654" Oct 03 09:39:16 crc 
kubenswrapper[4899]: I1003 09:39:16.146798 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45_c4f4d920-9e4c-4828-89dd-4e95975d5ec8/util/0.log" Oct 03 09:39:16 crc kubenswrapper[4899]: I1003 09:39:16.163350 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45_c4f4d920-9e4c-4828-89dd-4e95975d5ec8/pull/0.log" Oct 03 09:39:16 crc kubenswrapper[4899]: I1003 09:39:16.209565 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45_c4f4d920-9e4c-4828-89dd-4e95975d5ec8/pull/0.log" Oct 03 09:39:16 crc kubenswrapper[4899]: I1003 09:39:16.348178 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45_c4f4d920-9e4c-4828-89dd-4e95975d5ec8/pull/0.log" Oct 03 09:39:16 crc kubenswrapper[4899]: I1003 09:39:16.371144 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45_c4f4d920-9e4c-4828-89dd-4e95975d5ec8/util/0.log" Oct 03 09:39:16 crc kubenswrapper[4899]: I1003 09:39:16.417683 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45_c4f4d920-9e4c-4828-89dd-4e95975d5ec8/extract/0.log" Oct 03 09:39:16 crc kubenswrapper[4899]: I1003 09:39:16.579808 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-stjfx_2a4e1059-563b-443d-a7ce-d8af764e8900/marketplace-operator/0.log" Oct 03 09:39:16 crc kubenswrapper[4899]: I1003 09:39:16.630907 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-8bmxl_16e11948-6a93-444d-b9f9-c8f60100475c/extract-utilities/0.log" Oct 03 09:39:16 crc kubenswrapper[4899]: I1003 09:39:16.856919 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-8bmxl_16e11948-6a93-444d-b9f9-c8f60100475c/extract-utilities/0.log" Oct 03 09:39:16 crc kubenswrapper[4899]: I1003 09:39:16.866012 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-8bmxl_16e11948-6a93-444d-b9f9-c8f60100475c/extract-content/0.log" Oct 03 09:39:16 crc kubenswrapper[4899]: I1003 09:39:16.875313 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-8bmxl_16e11948-6a93-444d-b9f9-c8f60100475c/extract-content/0.log" Oct 03 09:39:17 crc kubenswrapper[4899]: I1003 09:39:17.120116 4899 generic.go:334] "Generic (PLEG): container finished" podID="2e41b7bf-1c29-4178-86f8-baed90d93665" containerID="27ab76f05fff2898e87c6d168bc8a64580e92552f0fe98e769f906afa3671f85" exitCode=0 Oct 03 09:39:17 crc kubenswrapper[4899]: I1003 09:39:17.121183 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zlrl8" event={"ID":"2e41b7bf-1c29-4178-86f8-baed90d93665","Type":"ContainerDied","Data":"27ab76f05fff2898e87c6d168bc8a64580e92552f0fe98e769f906afa3671f85"} Oct 03 09:39:17 crc kubenswrapper[4899]: I1003 09:39:17.163440 4899 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-8bmxl_16e11948-6a93-444d-b9f9-c8f60100475c/extract-content/0.log" Oct 03 09:39:17 crc kubenswrapper[4899]: I1003 09:39:17.200448 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-8bmxl_16e11948-6a93-444d-b9f9-c8f60100475c/extract-utilities/0.log" Oct 03 09:39:17 crc kubenswrapper[4899]: I1003 09:39:17.210310 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-8bmxl_16e11948-6a93-444d-b9f9-c8f60100475c/registry-server/0.log" Oct 03 09:39:17 crc kubenswrapper[4899]: I1003 09:39:17.231968 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hz84h_a8d22cd4-b04a-4b88-acd8-0949b94edd47/extract-utilities/0.log" Oct 03 09:39:17 crc kubenswrapper[4899]: I1003 09:39:17.401006 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hz84h_a8d22cd4-b04a-4b88-acd8-0949b94edd47/extract-content/0.log" Oct 03 09:39:17 crc kubenswrapper[4899]: I1003 09:39:17.409076 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hz84h_a8d22cd4-b04a-4b88-acd8-0949b94edd47/extract-utilities/0.log" Oct 03 09:39:17 crc kubenswrapper[4899]: I1003 09:39:17.440782 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hz84h_a8d22cd4-b04a-4b88-acd8-0949b94edd47/extract-content/0.log" Oct 03 09:39:17 crc kubenswrapper[4899]: I1003 09:39:17.555617 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hz84h_a8d22cd4-b04a-4b88-acd8-0949b94edd47/extract-utilities/0.log" Oct 03 09:39:17 crc kubenswrapper[4899]: I1003 09:39:17.607759 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hz84h_a8d22cd4-b04a-4b88-acd8-0949b94edd47/extract-content/0.log" Oct 03 09:39:17 crc kubenswrapper[4899]: I1003 09:39:17.979640 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hz84h_a8d22cd4-b04a-4b88-acd8-0949b94edd47/registry-server/0.log" Oct 03 09:39:18 crc kubenswrapper[4899]: I1003 09:39:18.131639 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zlrl8" event={"ID":"2e41b7bf-1c29-4178-86f8-baed90d93665","Type":"ContainerStarted","Data":"58b436a3a4b55bb430506cde759b30ddd3efcfd3ad62ac893b724edd62e0d0c1"} Oct 03 09:39:19 crc kubenswrapper[4899]: I1003 09:39:19.141751 4899 generic.go:334] "Generic (PLEG): container finished" podID="2e41b7bf-1c29-4178-86f8-baed90d93665" containerID="58b436a3a4b55bb430506cde759b30ddd3efcfd3ad62ac893b724edd62e0d0c1" exitCode=0 Oct 03 09:39:19 crc kubenswrapper[4899]: I1003 09:39:19.141858 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zlrl8" event={"ID":"2e41b7bf-1c29-4178-86f8-baed90d93665","Type":"ContainerDied","Data":"58b436a3a4b55bb430506cde759b30ddd3efcfd3ad62ac893b724edd62e0d0c1"} Oct 03 09:39:20 crc kubenswrapper[4899]: I1003 09:39:20.154342 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zlrl8" event={"ID":"2e41b7bf-1c29-4178-86f8-baed90d93665","Type":"ContainerStarted","Data":"eb49c8ed96b4811e45bcace4873e3fa8855e878466d450f8bf2060b85e98dfc6"} Oct 03 09:39:20 crc kubenswrapper[4899]: I1003 09:39:20.182656 4899 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zlrl8" podStartSLOduration=3.5333738329999997 podStartE2EDuration="6.182631033s" podCreationTimestamp="2025-10-03 09:39:14 +0000 UTC" firstStartedPulling="2025-10-03 09:39:17.123251265 +0000 UTC m=+3531.230736218" lastFinishedPulling="2025-10-03 09:39:19.772508465 +0000 UTC m=+3533.879993418" observedRunningTime="2025-10-03 09:39:20.176943623 +0000 UTC m=+3534.284428576" watchObservedRunningTime="2025-10-03 09:39:20.182631033 +0000 UTC m=+3534.290115986" Oct 03 09:39:22 crc kubenswrapper[4899]: I1003 09:39:22.112247 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-r6m6r" Oct 03 09:39:22 crc kubenswrapper[4899]: I1003 09:39:22.112551 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-r6m6r" Oct 03 09:39:22 crc kubenswrapper[4899]: I1003 09:39:22.161769 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-r6m6r" Oct 03 09:39:22 crc kubenswrapper[4899]: I1003 09:39:22.220381 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-r6m6r" Oct 03 09:39:22 crc kubenswrapper[4899]: I1003 09:39:22.569318 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-r6m6r"] Oct 03 09:39:24 crc kubenswrapper[4899]: I1003 09:39:24.189933 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-r6m6r" podUID="48c571bf-c1f1-4454-b47b-fe5f9a858ce8" containerName="registry-server" containerID="cri-o://7f09d7ee9333e00d280ac51816f9e6fce14ad6c8ad728ab492e45f64152ee39e" gracePeriod=2 Oct 03 09:39:25 crc kubenswrapper[4899]: I1003 09:39:25.114555 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r6m6r" Oct 03 09:39:25 crc kubenswrapper[4899]: I1003 09:39:25.158039 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bxthr\" (UniqueName: \"kubernetes.io/projected/48c571bf-c1f1-4454-b47b-fe5f9a858ce8-kube-api-access-bxthr\") pod \"48c571bf-c1f1-4454-b47b-fe5f9a858ce8\" (UID: \"48c571bf-c1f1-4454-b47b-fe5f9a858ce8\") " Oct 03 09:39:25 crc kubenswrapper[4899]: I1003 09:39:25.158084 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48c571bf-c1f1-4454-b47b-fe5f9a858ce8-utilities\") pod \"48c571bf-c1f1-4454-b47b-fe5f9a858ce8\" (UID: \"48c571bf-c1f1-4454-b47b-fe5f9a858ce8\") " Oct 03 09:39:25 crc kubenswrapper[4899]: I1003 09:39:25.158155 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48c571bf-c1f1-4454-b47b-fe5f9a858ce8-catalog-content\") pod \"48c571bf-c1f1-4454-b47b-fe5f9a858ce8\" (UID: \"48c571bf-c1f1-4454-b47b-fe5f9a858ce8\") " Oct 03 09:39:25 crc kubenswrapper[4899]: I1003 09:39:25.159058 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48c571bf-c1f1-4454-b47b-fe5f9a858ce8-utilities" (OuterVolumeSpecName: "utilities") pod "48c571bf-c1f1-4454-b47b-fe5f9a858ce8" (UID: "48c571bf-c1f1-4454-b47b-fe5f9a858ce8"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:39:25 crc kubenswrapper[4899]: I1003 09:39:25.165614 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48c571bf-c1f1-4454-b47b-fe5f9a858ce8-kube-api-access-bxthr" (OuterVolumeSpecName: "kube-api-access-bxthr") pod "48c571bf-c1f1-4454-b47b-fe5f9a858ce8" (UID: "48c571bf-c1f1-4454-b47b-fe5f9a858ce8"). InnerVolumeSpecName "kube-api-access-bxthr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:39:25 crc kubenswrapper[4899]: I1003 09:39:25.201629 4899 generic.go:334] "Generic (PLEG): container finished" podID="48c571bf-c1f1-4454-b47b-fe5f9a858ce8" containerID="7f09d7ee9333e00d280ac51816f9e6fce14ad6c8ad728ab492e45f64152ee39e" exitCode=0 Oct 03 09:39:25 crc kubenswrapper[4899]: I1003 09:39:25.201670 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r6m6r" Oct 03 09:39:25 crc kubenswrapper[4899]: I1003 09:39:25.201671 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r6m6r" event={"ID":"48c571bf-c1f1-4454-b47b-fe5f9a858ce8","Type":"ContainerDied","Data":"7f09d7ee9333e00d280ac51816f9e6fce14ad6c8ad728ab492e45f64152ee39e"} Oct 03 09:39:25 crc kubenswrapper[4899]: I1003 09:39:25.201800 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r6m6r" event={"ID":"48c571bf-c1f1-4454-b47b-fe5f9a858ce8","Type":"ContainerDied","Data":"9da43e72a8c2be5d71333840a0b7a77bc5d5f4c68b8e129db9f51ebec711a897"} Oct 03 09:39:25 crc kubenswrapper[4899]: I1003 09:39:25.201842 4899 scope.go:117] "RemoveContainer" containerID="7f09d7ee9333e00d280ac51816f9e6fce14ad6c8ad728ab492e45f64152ee39e" Oct 03 09:39:25 crc kubenswrapper[4899]: I1003 09:39:25.208547 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48c571bf-c1f1-4454-b47b-fe5f9a858ce8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "48c571bf-c1f1-4454-b47b-fe5f9a858ce8" (UID: "48c571bf-c1f1-4454-b47b-fe5f9a858ce8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:39:25 crc kubenswrapper[4899]: I1003 09:39:25.223508 4899 scope.go:117] "RemoveContainer" containerID="c227ba7624ebcbf3c8f2193c02db99bb500a145a6f07dcefc301ed9762068c86" Oct 03 09:39:25 crc kubenswrapper[4899]: I1003 09:39:25.242409 4899 scope.go:117] "RemoveContainer" containerID="7c148674106fda11a412cbc783e8bead455849d691dabc92352ff9dc500df2e4" Oct 03 09:39:25 crc kubenswrapper[4899]: I1003 09:39:25.260595 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bxthr\" (UniqueName: \"kubernetes.io/projected/48c571bf-c1f1-4454-b47b-fe5f9a858ce8-kube-api-access-bxthr\") on node \"crc\" DevicePath \"\"" Oct 03 09:39:25 crc kubenswrapper[4899]: I1003 09:39:25.260637 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48c571bf-c1f1-4454-b47b-fe5f9a858ce8-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 09:39:25 crc kubenswrapper[4899]: I1003 09:39:25.260651 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48c571bf-c1f1-4454-b47b-fe5f9a858ce8-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 09:39:25 crc kubenswrapper[4899]: I1003 09:39:25.283335 4899 scope.go:117] "RemoveContainer" containerID="7f09d7ee9333e00d280ac51816f9e6fce14ad6c8ad728ab492e45f64152ee39e" Oct 03 09:39:25 crc kubenswrapper[4899]: E1003 09:39:25.283821 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f09d7ee9333e00d280ac51816f9e6fce14ad6c8ad728ab492e45f64152ee39e\": container with ID starting with 7f09d7ee9333e00d280ac51816f9e6fce14ad6c8ad728ab492e45f64152ee39e not found: ID does not exist" containerID="7f09d7ee9333e00d280ac51816f9e6fce14ad6c8ad728ab492e45f64152ee39e" Oct 03 09:39:25 crc kubenswrapper[4899]: I1003 09:39:25.283866 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f09d7ee9333e00d280ac51816f9e6fce14ad6c8ad728ab492e45f64152ee39e"} err="failed to get container status \"7f09d7ee9333e00d280ac51816f9e6fce14ad6c8ad728ab492e45f64152ee39e\": rpc error: code = NotFound desc = could not find container \"7f09d7ee9333e00d280ac51816f9e6fce14ad6c8ad728ab492e45f64152ee39e\": container with ID starting with 7f09d7ee9333e00d280ac51816f9e6fce14ad6c8ad728ab492e45f64152ee39e not found: ID does not exist" Oct 03 09:39:25 crc kubenswrapper[4899]: I1003 09:39:25.283890 4899 scope.go:117] "RemoveContainer" containerID="c227ba7624ebcbf3c8f2193c02db99bb500a145a6f07dcefc301ed9762068c86" Oct 03 09:39:25 crc kubenswrapper[4899]: E1003 09:39:25.284279 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c227ba7624ebcbf3c8f2193c02db99bb500a145a6f07dcefc301ed9762068c86\": container with ID starting with c227ba7624ebcbf3c8f2193c02db99bb500a145a6f07dcefc301ed9762068c86 not found: ID does not exist" containerID="c227ba7624ebcbf3c8f2193c02db99bb500a145a6f07dcefc301ed9762068c86" Oct 03 09:39:25 crc kubenswrapper[4899]: I1003 09:39:25.284317 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c227ba7624ebcbf3c8f2193c02db99bb500a145a6f07dcefc301ed9762068c86"} err="failed to get container status \"c227ba7624ebcbf3c8f2193c02db99bb500a145a6f07dcefc301ed9762068c86\": rpc error: code = NotFound desc = could not find container 
\"c227ba7624ebcbf3c8f2193c02db99bb500a145a6f07dcefc301ed9762068c86\": container with ID starting with c227ba7624ebcbf3c8f2193c02db99bb500a145a6f07dcefc301ed9762068c86 not found: ID does not exist" Oct 03 09:39:25 crc kubenswrapper[4899]: I1003 09:39:25.284345 4899 scope.go:117] "RemoveContainer" containerID="7c148674106fda11a412cbc783e8bead455849d691dabc92352ff9dc500df2e4" Oct 03 09:39:25 crc kubenswrapper[4899]: E1003 09:39:25.284625 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c148674106fda11a412cbc783e8bead455849d691dabc92352ff9dc500df2e4\": container with ID starting with 7c148674106fda11a412cbc783e8bead455849d691dabc92352ff9dc500df2e4 not found: ID does not exist" containerID="7c148674106fda11a412cbc783e8bead455849d691dabc92352ff9dc500df2e4" Oct 03 09:39:25 crc kubenswrapper[4899]: I1003 09:39:25.284650 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c148674106fda11a412cbc783e8bead455849d691dabc92352ff9dc500df2e4"} err="failed to get container status \"7c148674106fda11a412cbc783e8bead455849d691dabc92352ff9dc500df2e4\": rpc error: code = NotFound desc = could not find container \"7c148674106fda11a412cbc783e8bead455849d691dabc92352ff9dc500df2e4\": container with ID starting with 7c148674106fda11a412cbc783e8bead455849d691dabc92352ff9dc500df2e4 not found: ID does not exist" Oct 03 09:39:25 crc kubenswrapper[4899]: I1003 09:39:25.301909 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zlrl8" Oct 03 09:39:25 crc kubenswrapper[4899]: I1003 09:39:25.301961 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zlrl8" Oct 03 09:39:25 crc kubenswrapper[4899]: I1003 09:39:25.352323 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zlrl8" Oct 03 09:39:25 crc kubenswrapper[4899]: I1003 09:39:25.535327 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-r6m6r"] Oct 03 09:39:25 crc kubenswrapper[4899]: I1003 09:39:25.544813 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-r6m6r"] Oct 03 09:39:26 crc kubenswrapper[4899]: I1003 09:39:26.261308 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zlrl8" Oct 03 09:39:26 crc kubenswrapper[4899]: I1003 09:39:26.539212 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48c571bf-c1f1-4454-b47b-fe5f9a858ce8" path="/var/lib/kubelet/pods/48c571bf-c1f1-4454-b47b-fe5f9a858ce8/volumes" Oct 03 09:39:27 crc kubenswrapper[4899]: I1003 09:39:27.762425 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zlrl8"] Oct 03 09:39:29 crc kubenswrapper[4899]: I1003 09:39:29.247994 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zlrl8" podUID="2e41b7bf-1c29-4178-86f8-baed90d93665" containerName="registry-server" containerID="cri-o://eb49c8ed96b4811e45bcace4873e3fa8855e878466d450f8bf2060b85e98dfc6" gracePeriod=2 Oct 03 09:39:29 crc kubenswrapper[4899]: I1003 09:39:29.718617 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zlrl8" Oct 03 09:39:29 crc kubenswrapper[4899]: I1003 09:39:29.744608 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e41b7bf-1c29-4178-86f8-baed90d93665-catalog-content\") pod \"2e41b7bf-1c29-4178-86f8-baed90d93665\" (UID: \"2e41b7bf-1c29-4178-86f8-baed90d93665\") " Oct 03 09:39:29 crc kubenswrapper[4899]: I1003 09:39:29.744764 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e41b7bf-1c29-4178-86f8-baed90d93665-utilities\") pod \"2e41b7bf-1c29-4178-86f8-baed90d93665\" (UID: \"2e41b7bf-1c29-4178-86f8-baed90d93665\") " Oct 03 09:39:29 crc kubenswrapper[4899]: I1003 09:39:29.744874 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b79rg\" (UniqueName: \"kubernetes.io/projected/2e41b7bf-1c29-4178-86f8-baed90d93665-kube-api-access-b79rg\") pod \"2e41b7bf-1c29-4178-86f8-baed90d93665\" (UID: \"2e41b7bf-1c29-4178-86f8-baed90d93665\") " Oct 03 09:39:29 crc kubenswrapper[4899]: I1003 09:39:29.745651 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e41b7bf-1c29-4178-86f8-baed90d93665-utilities" (OuterVolumeSpecName: "utilities") pod "2e41b7bf-1c29-4178-86f8-baed90d93665" (UID: "2e41b7bf-1c29-4178-86f8-baed90d93665"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:39:29 crc kubenswrapper[4899]: I1003 09:39:29.751373 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e41b7bf-1c29-4178-86f8-baed90d93665-kube-api-access-b79rg" (OuterVolumeSpecName: "kube-api-access-b79rg") pod "2e41b7bf-1c29-4178-86f8-baed90d93665" (UID: "2e41b7bf-1c29-4178-86f8-baed90d93665"). InnerVolumeSpecName "kube-api-access-b79rg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:39:29 crc kubenswrapper[4899]: I1003 09:39:29.791273 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e41b7bf-1c29-4178-86f8-baed90d93665-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2e41b7bf-1c29-4178-86f8-baed90d93665" (UID: "2e41b7bf-1c29-4178-86f8-baed90d93665"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:39:29 crc kubenswrapper[4899]: I1003 09:39:29.846800 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e41b7bf-1c29-4178-86f8-baed90d93665-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 09:39:29 crc kubenswrapper[4899]: I1003 09:39:29.846840 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e41b7bf-1c29-4178-86f8-baed90d93665-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 09:39:29 crc kubenswrapper[4899]: I1003 09:39:29.846853 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b79rg\" (UniqueName: \"kubernetes.io/projected/2e41b7bf-1c29-4178-86f8-baed90d93665-kube-api-access-b79rg\") on node \"crc\" DevicePath \"\"" Oct 03 09:39:30 crc kubenswrapper[4899]: I1003 09:39:30.258443 4899 generic.go:334] "Generic (PLEG): container finished" podID="2e41b7bf-1c29-4178-86f8-baed90d93665" containerID="eb49c8ed96b4811e45bcace4873e3fa8855e878466d450f8bf2060b85e98dfc6" exitCode=0 Oct 03 09:39:30 crc kubenswrapper[4899]: I1003 09:39:30.258483 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zlrl8" event={"ID":"2e41b7bf-1c29-4178-86f8-baed90d93665","Type":"ContainerDied","Data":"eb49c8ed96b4811e45bcace4873e3fa8855e878466d450f8bf2060b85e98dfc6"} Oct 03 09:39:30 crc kubenswrapper[4899]: I1003 09:39:30.258512 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zlrl8" event={"ID":"2e41b7bf-1c29-4178-86f8-baed90d93665","Type":"ContainerDied","Data":"f2ee029d4b97aa383bc47859690a308181935ea97c527318aead2b2242229381"} Oct 03 09:39:30 crc kubenswrapper[4899]: I1003 09:39:30.258528 4899 scope.go:117] "RemoveContainer" containerID="eb49c8ed96b4811e45bcace4873e3fa8855e878466d450f8bf2060b85e98dfc6" Oct 03 09:39:30 crc kubenswrapper[4899]: I1003 09:39:30.258649 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zlrl8" Oct 03 09:39:30 crc kubenswrapper[4899]: I1003 09:39:30.292205 4899 scope.go:117] "RemoveContainer" containerID="58b436a3a4b55bb430506cde759b30ddd3efcfd3ad62ac893b724edd62e0d0c1" Oct 03 09:39:30 crc kubenswrapper[4899]: I1003 09:39:30.295529 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zlrl8"] Oct 03 09:39:30 crc kubenswrapper[4899]: I1003 09:39:30.306045 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zlrl8"] Oct 03 09:39:30 crc kubenswrapper[4899]: I1003 09:39:30.315612 4899 scope.go:117] "RemoveContainer" containerID="27ab76f05fff2898e87c6d168bc8a64580e92552f0fe98e769f906afa3671f85" Oct 03 09:39:30 crc kubenswrapper[4899]: I1003 09:39:30.357507 4899 scope.go:117] "RemoveContainer" containerID="eb49c8ed96b4811e45bcace4873e3fa8855e878466d450f8bf2060b85e98dfc6" Oct 03 09:39:30 crc kubenswrapper[4899]: E1003 09:39:30.360061 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb49c8ed96b4811e45bcace4873e3fa8855e878466d450f8bf2060b85e98dfc6\": container with ID starting with eb49c8ed96b4811e45bcace4873e3fa8855e878466d450f8bf2060b85e98dfc6 not found: ID does not exist" containerID="eb49c8ed96b4811e45bcace4873e3fa8855e878466d450f8bf2060b85e98dfc6" Oct 03 09:39:30 crc kubenswrapper[4899]: I1003 09:39:30.360127 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb49c8ed96b4811e45bcace4873e3fa8855e878466d450f8bf2060b85e98dfc6"} err="failed to get container status \"eb49c8ed96b4811e45bcace4873e3fa8855e878466d450f8bf2060b85e98dfc6\": rpc error: code = NotFound desc = could not find container \"eb49c8ed96b4811e45bcace4873e3fa8855e878466d450f8bf2060b85e98dfc6\": container with ID starting with eb49c8ed96b4811e45bcace4873e3fa8855e878466d450f8bf2060b85e98dfc6 not found: ID does not exist" Oct 03 09:39:30 crc kubenswrapper[4899]: I1003 09:39:30.360173 4899 scope.go:117] "RemoveContainer" containerID="58b436a3a4b55bb430506cde759b30ddd3efcfd3ad62ac893b724edd62e0d0c1" Oct 03 09:39:30 crc kubenswrapper[4899]: E1003 09:39:30.360717 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58b436a3a4b55bb430506cde759b30ddd3efcfd3ad62ac893b724edd62e0d0c1\": container with ID starting with 58b436a3a4b55bb430506cde759b30ddd3efcfd3ad62ac893b724edd62e0d0c1 not found: ID does not exist" containerID="58b436a3a4b55bb430506cde759b30ddd3efcfd3ad62ac893b724edd62e0d0c1" Oct 03 09:39:30 crc kubenswrapper[4899]: I1003 09:39:30.360761 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58b436a3a4b55bb430506cde759b30ddd3efcfd3ad62ac893b724edd62e0d0c1"} err="failed to get container status \"58b436a3a4b55bb430506cde759b30ddd3efcfd3ad62ac893b724edd62e0d0c1\": rpc error: code = NotFound desc = could not find container \"58b436a3a4b55bb430506cde759b30ddd3efcfd3ad62ac893b724edd62e0d0c1\": container with ID starting with 58b436a3a4b55bb430506cde759b30ddd3efcfd3ad62ac893b724edd62e0d0c1 not found: ID does not exist" Oct 03 09:39:30 crc kubenswrapper[4899]: I1003 09:39:30.360785 4899 scope.go:117] "RemoveContainer" containerID="27ab76f05fff2898e87c6d168bc8a64580e92552f0fe98e769f906afa3671f85" Oct 03 09:39:30 crc kubenswrapper[4899]: E1003 09:39:30.361206 4899 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"27ab76f05fff2898e87c6d168bc8a64580e92552f0fe98e769f906afa3671f85\": container with ID starting with 27ab76f05fff2898e87c6d168bc8a64580e92552f0fe98e769f906afa3671f85 not found: ID does not exist" containerID="27ab76f05fff2898e87c6d168bc8a64580e92552f0fe98e769f906afa3671f85" Oct 03 09:39:30 crc kubenswrapper[4899]: I1003 09:39:30.361247 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27ab76f05fff2898e87c6d168bc8a64580e92552f0fe98e769f906afa3671f85"} err="failed to get container status \"27ab76f05fff2898e87c6d168bc8a64580e92552f0fe98e769f906afa3671f85\": rpc error: code = NotFound desc = could not find container \"27ab76f05fff2898e87c6d168bc8a64580e92552f0fe98e769f906afa3671f85\": container with ID starting with 27ab76f05fff2898e87c6d168bc8a64580e92552f0fe98e769f906afa3671f85 not found: ID does not exist" Oct 03 09:39:30 crc kubenswrapper[4899]: I1003 09:39:30.538345 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e41b7bf-1c29-4178-86f8-baed90d93665" path="/var/lib/kubelet/pods/2e41b7bf-1c29-4178-86f8-baed90d93665/volumes" Oct 03 09:39:42 crc kubenswrapper[4899]: I1003 09:39:42.198681 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 09:39:42 crc kubenswrapper[4899]: I1003 09:39:42.199292 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 09:39:47 crc kubenswrapper[4899]: E1003 09:39:47.808061 4899 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.129.56.217:34208->38.129.56.217:44793: write tcp 38.129.56.217:34208->38.129.56.217:44793: write: broken pipe Oct 03 09:40:12 crc kubenswrapper[4899]: I1003 09:40:12.198094 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 09:40:12 crc kubenswrapper[4899]: I1003 09:40:12.198626 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 09:40:12 crc kubenswrapper[4899]: I1003 09:40:12.198674 4899 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 09:40:12 crc kubenswrapper[4899]: I1003 09:40:12.199391 4899 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba"} pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" containerMessage="Container machine-config-daemon failed 
liveness probe, will be restarted" Oct 03 09:40:12 crc kubenswrapper[4899]: I1003 09:40:12.199441 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" containerID="cri-o://b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba" gracePeriod=600 Oct 03 09:40:12 crc kubenswrapper[4899]: E1003 09:40:12.329830 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:40:12 crc kubenswrapper[4899]: I1003 09:40:12.645415 4899 generic.go:334] "Generic (PLEG): container finished" podID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerID="b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba" exitCode=0 Oct 03 09:40:12 crc kubenswrapper[4899]: I1003 09:40:12.645450 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerDied","Data":"b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba"} Oct 03 09:40:12 crc kubenswrapper[4899]: I1003 09:40:12.645507 4899 scope.go:117] "RemoveContainer" containerID="512c2485fabb512b76ede2aa8b2cf298ff6f84b744dde89782c0144994984843" Oct 03 09:40:12 crc kubenswrapper[4899]: I1003 09:40:12.646195 4899 scope.go:117] "RemoveContainer" containerID="b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba" Oct 03 09:40:12 crc kubenswrapper[4899]: E1003 09:40:12.646498 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:40:27 crc kubenswrapper[4899]: I1003 09:40:27.527339 4899 scope.go:117] "RemoveContainer" containerID="b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba" Oct 03 09:40:27 crc kubenswrapper[4899]: E1003 09:40:27.528308 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:40:38 crc kubenswrapper[4899]: I1003 09:40:38.531727 4899 scope.go:117] "RemoveContainer" containerID="b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba" Oct 03 09:40:38 crc kubenswrapper[4899]: E1003 09:40:38.532561 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:40:52 crc kubenswrapper[4899]: I1003 09:40:52.527536 4899 scope.go:117] "RemoveContainer" containerID="b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba" Oct 03 09:40:52 crc kubenswrapper[4899]: E1003 09:40:52.528207 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:41:04 crc kubenswrapper[4899]: I1003 09:41:04.528050 4899 scope.go:117] "RemoveContainer" containerID="b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba" Oct 03 09:41:04 crc kubenswrapper[4899]: E1003 09:41:04.528809 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:41:16 crc kubenswrapper[4899]: I1003 09:41:16.230109 4899 generic.go:334] "Generic (PLEG): container finished" podID="51d13c5b-183a-42b0-a15d-156896f6154e" containerID="31d6abbf481210b871a1f677c180f0af9d33f894b3224f7e28d56af84519722d" exitCode=0 Oct 03 09:41:16 crc kubenswrapper[4899]: I1003 09:41:16.230198 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rf2ld/must-gather-xgz5m" event={"ID":"51d13c5b-183a-42b0-a15d-156896f6154e","Type":"ContainerDied","Data":"31d6abbf481210b871a1f677c180f0af9d33f894b3224f7e28d56af84519722d"} Oct 03 09:41:16 crc kubenswrapper[4899]: I1003 09:41:16.231320 4899 scope.go:117] "RemoveContainer" containerID="31d6abbf481210b871a1f677c180f0af9d33f894b3224f7e28d56af84519722d" Oct 03 09:41:16 crc kubenswrapper[4899]: I1003 09:41:16.512042 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-rf2ld_must-gather-xgz5m_51d13c5b-183a-42b0-a15d-156896f6154e/gather/0.log" Oct 03 09:41:19 crc kubenswrapper[4899]: I1003 09:41:19.536235 4899 scope.go:117] "RemoveContainer" containerID="b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba" Oct 03 09:41:19 crc kubenswrapper[4899]: E1003 09:41:19.537031 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:41:23 crc kubenswrapper[4899]: I1003 09:41:23.924157 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-rf2ld/must-gather-xgz5m"] Oct 03 09:41:23 crc kubenswrapper[4899]: I1003 09:41:23.925102 4899 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-must-gather-rf2ld/must-gather-xgz5m" podUID="51d13c5b-183a-42b0-a15d-156896f6154e" containerName="copy" containerID="cri-o://779eb809609563c28b5f515a3783dc17bf8e1a6fe4d7ec6647e4d2917931eef4" gracePeriod=2 Oct 03 09:41:23 crc kubenswrapper[4899]: I1003 09:41:23.934453 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-rf2ld/must-gather-xgz5m"] Oct 03 09:41:24 crc kubenswrapper[4899]: I1003 09:41:24.307143 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-rf2ld_must-gather-xgz5m_51d13c5b-183a-42b0-a15d-156896f6154e/copy/0.log" Oct 03 09:41:24 crc kubenswrapper[4899]: I1003 09:41:24.308568 4899 generic.go:334] "Generic (PLEG): container finished" podID="51d13c5b-183a-42b0-a15d-156896f6154e" containerID="779eb809609563c28b5f515a3783dc17bf8e1a6fe4d7ec6647e4d2917931eef4" exitCode=143 Oct 03 09:41:24 crc kubenswrapper[4899]: I1003 09:41:24.445379 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-rf2ld_must-gather-xgz5m_51d13c5b-183a-42b0-a15d-156896f6154e/copy/0.log" Oct 03 09:41:24 crc kubenswrapper[4899]: I1003 09:41:24.445931 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rf2ld/must-gather-xgz5m" Oct 03 09:41:24 crc kubenswrapper[4899]: I1003 09:41:24.566542 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-77cgh\" (UniqueName: \"kubernetes.io/projected/51d13c5b-183a-42b0-a15d-156896f6154e-kube-api-access-77cgh\") pod \"51d13c5b-183a-42b0-a15d-156896f6154e\" (UID: \"51d13c5b-183a-42b0-a15d-156896f6154e\") " Oct 03 09:41:24 crc kubenswrapper[4899]: I1003 09:41:24.566801 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/51d13c5b-183a-42b0-a15d-156896f6154e-must-gather-output\") pod \"51d13c5b-183a-42b0-a15d-156896f6154e\" (UID: \"51d13c5b-183a-42b0-a15d-156896f6154e\") " Oct 03 09:41:24 crc kubenswrapper[4899]: I1003 09:41:24.575263 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51d13c5b-183a-42b0-a15d-156896f6154e-kube-api-access-77cgh" (OuterVolumeSpecName: "kube-api-access-77cgh") pod "51d13c5b-183a-42b0-a15d-156896f6154e" (UID: "51d13c5b-183a-42b0-a15d-156896f6154e"). InnerVolumeSpecName "kube-api-access-77cgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:41:24 crc kubenswrapper[4899]: I1003 09:41:24.673144 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-77cgh\" (UniqueName: \"kubernetes.io/projected/51d13c5b-183a-42b0-a15d-156896f6154e-kube-api-access-77cgh\") on node \"crc\" DevicePath \"\"" Oct 03 09:41:24 crc kubenswrapper[4899]: I1003 09:41:24.746292 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/51d13c5b-183a-42b0-a15d-156896f6154e-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "51d13c5b-183a-42b0-a15d-156896f6154e" (UID: "51d13c5b-183a-42b0-a15d-156896f6154e"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:41:24 crc kubenswrapper[4899]: I1003 09:41:24.775677 4899 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/51d13c5b-183a-42b0-a15d-156896f6154e-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 03 09:41:25 crc kubenswrapper[4899]: I1003 09:41:25.320950 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-rf2ld_must-gather-xgz5m_51d13c5b-183a-42b0-a15d-156896f6154e/copy/0.log" Oct 03 09:41:25 crc kubenswrapper[4899]: I1003 09:41:25.321252 4899 scope.go:117] "RemoveContainer" containerID="779eb809609563c28b5f515a3783dc17bf8e1a6fe4d7ec6647e4d2917931eef4" Oct 03 09:41:25 crc kubenswrapper[4899]: I1003 09:41:25.321370 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rf2ld/must-gather-xgz5m" Oct 03 09:41:25 crc kubenswrapper[4899]: I1003 09:41:25.347087 4899 scope.go:117] "RemoveContainer" containerID="31d6abbf481210b871a1f677c180f0af9d33f894b3224f7e28d56af84519722d" Oct 03 09:41:26 crc kubenswrapper[4899]: I1003 09:41:26.538620 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51d13c5b-183a-42b0-a15d-156896f6154e" path="/var/lib/kubelet/pods/51d13c5b-183a-42b0-a15d-156896f6154e/volumes" Oct 03 09:41:30 crc kubenswrapper[4899]: I1003 09:41:30.527730 4899 scope.go:117] "RemoveContainer" containerID="b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba" Oct 03 09:41:30 crc kubenswrapper[4899]: E1003 09:41:30.528689 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:41:44 crc kubenswrapper[4899]: I1003 09:41:44.527181 4899 scope.go:117] "RemoveContainer" containerID="b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba" Oct 03 09:41:44 crc kubenswrapper[4899]: E1003 09:41:44.527936 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:41:50 crc kubenswrapper[4899]: I1003 09:41:50.223443 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-b8z59/must-gather-ssb8q"] Oct 03 09:41:50 crc kubenswrapper[4899]: E1003 09:41:50.224329 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51d13c5b-183a-42b0-a15d-156896f6154e" containerName="copy" Oct 03 09:41:50 crc kubenswrapper[4899]: I1003 09:41:50.224344 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="51d13c5b-183a-42b0-a15d-156896f6154e" containerName="copy" Oct 03 09:41:50 crc kubenswrapper[4899]: E1003 09:41:50.224367 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51d13c5b-183a-42b0-a15d-156896f6154e" containerName="gather" Oct 03 09:41:50 crc kubenswrapper[4899]: I1003 09:41:50.224373 4899 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="51d13c5b-183a-42b0-a15d-156896f6154e" containerName="gather" Oct 03 09:41:50 crc kubenswrapper[4899]: E1003 09:41:50.224383 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e41b7bf-1c29-4178-86f8-baed90d93665" containerName="registry-server" Oct 03 09:41:50 crc kubenswrapper[4899]: I1003 09:41:50.224392 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e41b7bf-1c29-4178-86f8-baed90d93665" containerName="registry-server" Oct 03 09:41:50 crc kubenswrapper[4899]: E1003 09:41:50.224408 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48c571bf-c1f1-4454-b47b-fe5f9a858ce8" containerName="extract-content" Oct 03 09:41:50 crc kubenswrapper[4899]: I1003 09:41:50.224414 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="48c571bf-c1f1-4454-b47b-fe5f9a858ce8" containerName="extract-content" Oct 03 09:41:50 crc kubenswrapper[4899]: E1003 09:41:50.224430 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e41b7bf-1c29-4178-86f8-baed90d93665" containerName="extract-utilities" Oct 03 09:41:50 crc kubenswrapper[4899]: I1003 09:41:50.224442 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e41b7bf-1c29-4178-86f8-baed90d93665" containerName="extract-utilities" Oct 03 09:41:50 crc kubenswrapper[4899]: E1003 09:41:50.224459 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e41b7bf-1c29-4178-86f8-baed90d93665" containerName="extract-content" Oct 03 09:41:50 crc kubenswrapper[4899]: I1003 09:41:50.224466 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e41b7bf-1c29-4178-86f8-baed90d93665" containerName="extract-content" Oct 03 09:41:50 crc kubenswrapper[4899]: E1003 09:41:50.224473 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48c571bf-c1f1-4454-b47b-fe5f9a858ce8" containerName="extract-utilities" Oct 03 09:41:50 crc kubenswrapper[4899]: I1003 09:41:50.224480 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="48c571bf-c1f1-4454-b47b-fe5f9a858ce8" containerName="extract-utilities" Oct 03 09:41:50 crc kubenswrapper[4899]: E1003 09:41:50.224499 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48c571bf-c1f1-4454-b47b-fe5f9a858ce8" containerName="registry-server" Oct 03 09:41:50 crc kubenswrapper[4899]: I1003 09:41:50.224504 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="48c571bf-c1f1-4454-b47b-fe5f9a858ce8" containerName="registry-server" Oct 03 09:41:50 crc kubenswrapper[4899]: I1003 09:41:50.224700 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="51d13c5b-183a-42b0-a15d-156896f6154e" containerName="copy" Oct 03 09:41:50 crc kubenswrapper[4899]: I1003 09:41:50.224717 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e41b7bf-1c29-4178-86f8-baed90d93665" containerName="registry-server" Oct 03 09:41:50 crc kubenswrapper[4899]: I1003 09:41:50.224733 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="51d13c5b-183a-42b0-a15d-156896f6154e" containerName="gather" Oct 03 09:41:50 crc kubenswrapper[4899]: I1003 09:41:50.224758 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="48c571bf-c1f1-4454-b47b-fe5f9a858ce8" containerName="registry-server" Oct 03 09:41:50 crc kubenswrapper[4899]: I1003 09:41:50.225821 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-b8z59/must-gather-ssb8q" Oct 03 09:41:50 crc kubenswrapper[4899]: I1003 09:41:50.227800 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-b8z59"/"default-dockercfg-cvxjj" Oct 03 09:41:50 crc kubenswrapper[4899]: I1003 09:41:50.233626 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-b8z59"/"kube-root-ca.crt" Oct 03 09:41:50 crc kubenswrapper[4899]: I1003 09:41:50.236663 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-b8z59"/"openshift-service-ca.crt" Oct 03 09:41:50 crc kubenswrapper[4899]: I1003 09:41:50.236745 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-b8z59/must-gather-ssb8q"] Oct 03 09:41:50 crc kubenswrapper[4899]: I1003 09:41:50.284390 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkqwh\" (UniqueName: \"kubernetes.io/projected/6c9c1639-66c8-4f63-be49-87f099fa6a39-kube-api-access-lkqwh\") pod \"must-gather-ssb8q\" (UID: \"6c9c1639-66c8-4f63-be49-87f099fa6a39\") " pod="openshift-must-gather-b8z59/must-gather-ssb8q" Oct 03 09:41:50 crc kubenswrapper[4899]: I1003 09:41:50.284565 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6c9c1639-66c8-4f63-be49-87f099fa6a39-must-gather-output\") pod \"must-gather-ssb8q\" (UID: \"6c9c1639-66c8-4f63-be49-87f099fa6a39\") " pod="openshift-must-gather-b8z59/must-gather-ssb8q" Oct 03 09:41:50 crc kubenswrapper[4899]: I1003 09:41:50.385842 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkqwh\" (UniqueName: \"kubernetes.io/projected/6c9c1639-66c8-4f63-be49-87f099fa6a39-kube-api-access-lkqwh\") pod \"must-gather-ssb8q\" (UID: \"6c9c1639-66c8-4f63-be49-87f099fa6a39\") " pod="openshift-must-gather-b8z59/must-gather-ssb8q" Oct 03 09:41:50 crc kubenswrapper[4899]: I1003 09:41:50.386466 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6c9c1639-66c8-4f63-be49-87f099fa6a39-must-gather-output\") pod \"must-gather-ssb8q\" (UID: \"6c9c1639-66c8-4f63-be49-87f099fa6a39\") " pod="openshift-must-gather-b8z59/must-gather-ssb8q" Oct 03 09:41:50 crc kubenswrapper[4899]: I1003 09:41:50.386872 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6c9c1639-66c8-4f63-be49-87f099fa6a39-must-gather-output\") pod \"must-gather-ssb8q\" (UID: \"6c9c1639-66c8-4f63-be49-87f099fa6a39\") " pod="openshift-must-gather-b8z59/must-gather-ssb8q" Oct 03 09:41:50 crc kubenswrapper[4899]: I1003 09:41:50.408614 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkqwh\" (UniqueName: \"kubernetes.io/projected/6c9c1639-66c8-4f63-be49-87f099fa6a39-kube-api-access-lkqwh\") pod \"must-gather-ssb8q\" (UID: \"6c9c1639-66c8-4f63-be49-87f099fa6a39\") " pod="openshift-must-gather-b8z59/must-gather-ssb8q" Oct 03 09:41:50 crc kubenswrapper[4899]: I1003 09:41:50.545391 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-b8z59/must-gather-ssb8q" Oct 03 09:41:50 crc kubenswrapper[4899]: I1003 09:41:50.982439 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-b8z59/must-gather-ssb8q"] Oct 03 09:41:51 crc kubenswrapper[4899]: I1003 09:41:51.611450 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-b8z59/must-gather-ssb8q" event={"ID":"6c9c1639-66c8-4f63-be49-87f099fa6a39","Type":"ContainerStarted","Data":"58c89a6b7e3f0b0e2ddc98d210a60844d732e7d1046002dc29d2cd8be1ffb78e"} Oct 03 09:41:51 crc kubenswrapper[4899]: I1003 09:41:51.611911 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-b8z59/must-gather-ssb8q" event={"ID":"6c9c1639-66c8-4f63-be49-87f099fa6a39","Type":"ContainerStarted","Data":"91c1387c77c74cff722f4f72111b70fe091939c6ca7250540b45e9c88c6ddfe4"} Oct 03 09:41:51 crc kubenswrapper[4899]: I1003 09:41:51.611928 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-b8z59/must-gather-ssb8q" event={"ID":"6c9c1639-66c8-4f63-be49-87f099fa6a39","Type":"ContainerStarted","Data":"71aba91da2e0e1b5700f248aaeb51e4a7359388ad11baf37c5a3609262d43b29"} Oct 03 09:41:51 crc kubenswrapper[4899]: I1003 09:41:51.630680 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-b8z59/must-gather-ssb8q" podStartSLOduration=1.630615199 podStartE2EDuration="1.630615199s" podCreationTimestamp="2025-10-03 09:41:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 09:41:51.626544481 +0000 UTC m=+3685.734029434" watchObservedRunningTime="2025-10-03 09:41:51.630615199 +0000 UTC m=+3685.738100162" Oct 03 09:41:53 crc kubenswrapper[4899]: E1003 09:41:53.936478 4899 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.129.56.217:56828->38.129.56.217:44793: read tcp 38.129.56.217:56828->38.129.56.217:44793: read: connection reset by peer Oct 03 09:41:54 crc kubenswrapper[4899]: I1003 09:41:54.977618 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-b8z59/crc-debug-rjgjh"] Oct 03 09:41:54 crc kubenswrapper[4899]: I1003 09:41:54.979180 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-b8z59/crc-debug-rjgjh" Oct 03 09:41:55 crc kubenswrapper[4899]: I1003 09:41:55.083940 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4b0e8903-c369-4e44-8003-6e83352ede7e-host\") pod \"crc-debug-rjgjh\" (UID: \"4b0e8903-c369-4e44-8003-6e83352ede7e\") " pod="openshift-must-gather-b8z59/crc-debug-rjgjh" Oct 03 09:41:55 crc kubenswrapper[4899]: I1003 09:41:55.083983 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmlpm\" (UniqueName: \"kubernetes.io/projected/4b0e8903-c369-4e44-8003-6e83352ede7e-kube-api-access-mmlpm\") pod \"crc-debug-rjgjh\" (UID: \"4b0e8903-c369-4e44-8003-6e83352ede7e\") " pod="openshift-must-gather-b8z59/crc-debug-rjgjh" Oct 03 09:41:55 crc kubenswrapper[4899]: I1003 09:41:55.186450 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4b0e8903-c369-4e44-8003-6e83352ede7e-host\") pod \"crc-debug-rjgjh\" (UID: \"4b0e8903-c369-4e44-8003-6e83352ede7e\") " pod="openshift-must-gather-b8z59/crc-debug-rjgjh" Oct 03 09:41:55 crc kubenswrapper[4899]: I1003 09:41:55.186807 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmlpm\" (UniqueName: \"kubernetes.io/projected/4b0e8903-c369-4e44-8003-6e83352ede7e-kube-api-access-mmlpm\") pod \"crc-debug-rjgjh\" (UID: \"4b0e8903-c369-4e44-8003-6e83352ede7e\") " pod="openshift-must-gather-b8z59/crc-debug-rjgjh" Oct 03 09:41:55 crc kubenswrapper[4899]: I1003 09:41:55.186586 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4b0e8903-c369-4e44-8003-6e83352ede7e-host\") pod \"crc-debug-rjgjh\" (UID: \"4b0e8903-c369-4e44-8003-6e83352ede7e\") " pod="openshift-must-gather-b8z59/crc-debug-rjgjh" Oct 03 09:41:55 crc kubenswrapper[4899]: I1003 09:41:55.216935 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmlpm\" (UniqueName: \"kubernetes.io/projected/4b0e8903-c369-4e44-8003-6e83352ede7e-kube-api-access-mmlpm\") pod \"crc-debug-rjgjh\" (UID: \"4b0e8903-c369-4e44-8003-6e83352ede7e\") " pod="openshift-must-gather-b8z59/crc-debug-rjgjh" Oct 03 09:41:55 crc kubenswrapper[4899]: I1003 09:41:55.302641 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-b8z59/crc-debug-rjgjh" Oct 03 09:41:55 crc kubenswrapper[4899]: W1003 09:41:55.335915 4899 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4b0e8903_c369_4e44_8003_6e83352ede7e.slice/crio-1c457385a3754b4030ee3c56fc4b62475d061516796b8da8b9e46f1912ad88cc WatchSource:0}: Error finding container 1c457385a3754b4030ee3c56fc4b62475d061516796b8da8b9e46f1912ad88cc: Status 404 returned error can't find the container with id 1c457385a3754b4030ee3c56fc4b62475d061516796b8da8b9e46f1912ad88cc Oct 03 09:41:55 crc kubenswrapper[4899]: I1003 09:41:55.643689 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-b8z59/crc-debug-rjgjh" event={"ID":"4b0e8903-c369-4e44-8003-6e83352ede7e","Type":"ContainerStarted","Data":"1c457385a3754b4030ee3c56fc4b62475d061516796b8da8b9e46f1912ad88cc"} Oct 03 09:41:56 crc kubenswrapper[4899]: I1003 09:41:56.654989 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-b8z59/crc-debug-rjgjh" event={"ID":"4b0e8903-c369-4e44-8003-6e83352ede7e","Type":"ContainerStarted","Data":"29ca2859256c29e15c87f58d41594e0030e2a4a07c8f64f419b405bd020e8405"} Oct 03 09:41:56 crc kubenswrapper[4899]: I1003 09:41:56.673561 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-b8z59/crc-debug-rjgjh" podStartSLOduration=2.673541504 podStartE2EDuration="2.673541504s" podCreationTimestamp="2025-10-03 09:41:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 09:41:56.669676932 +0000 UTC m=+3690.777161885" watchObservedRunningTime="2025-10-03 09:41:56.673541504 +0000 UTC m=+3690.781026457" Oct 03 09:41:59 crc kubenswrapper[4899]: I1003 09:41:59.528113 4899 scope.go:117] "RemoveContainer" containerID="b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba" Oct 03 09:41:59 crc kubenswrapper[4899]: E1003 09:41:59.528991 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:42:12 crc kubenswrapper[4899]: I1003 09:42:12.527181 4899 scope.go:117] "RemoveContainer" containerID="b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba" Oct 03 09:42:12 crc kubenswrapper[4899]: E1003 09:42:12.528051 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:42:27 crc kubenswrapper[4899]: I1003 09:42:27.527176 4899 scope.go:117] "RemoveContainer" containerID="b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba" Oct 03 09:42:27 crc kubenswrapper[4899]: E1003 09:42:27.527974 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:42:41 crc kubenswrapper[4899]: I1003 09:42:41.528119 4899 scope.go:117] "RemoveContainer" containerID="b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba" Oct 03 09:42:41 crc kubenswrapper[4899]: E1003 09:42:41.528989 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:42:50 crc kubenswrapper[4899]: I1003 09:42:50.691733 4899 scope.go:117] "RemoveContainer" containerID="d89c6ed1579bdd35e598a05a3d6f0dd4dfd86e8efdf111aeb47df2999aaccfb8" Oct 03 09:42:53 crc kubenswrapper[4899]: I1003 09:42:53.526935 4899 scope.go:117] "RemoveContainer" containerID="b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba" Oct 03 09:42:53 crc kubenswrapper[4899]: E1003 09:42:53.527727 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:42:59 crc kubenswrapper[4899]: I1003 09:42:59.077300 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-56dc79cc94-hbxqp_85b9210c-c4ec-4020-9137-f4b4fdf9dc51/barbican-api/0.log" Oct 03 09:42:59 crc kubenswrapper[4899]: I1003 09:42:59.121306 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-56dc79cc94-hbxqp_85b9210c-c4ec-4020-9137-f4b4fdf9dc51/barbican-api-log/0.log" Oct 03 09:42:59 crc kubenswrapper[4899]: I1003 09:42:59.307227 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-59465fcb84-kkbzz_0bd52669-a824-4b16-a840-2feed9e46a6c/barbican-keystone-listener/0.log" Oct 03 09:42:59 crc kubenswrapper[4899]: I1003 09:42:59.581412 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-59465fcb84-kkbzz_0bd52669-a824-4b16-a840-2feed9e46a6c/barbican-keystone-listener-log/0.log" Oct 03 09:42:59 crc kubenswrapper[4899]: I1003 09:42:59.743486 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5b77574dc-vm5lv_69c8959c-64e4-43a6-9b2c-133dd960fc67/barbican-worker/0.log" Oct 03 09:42:59 crc kubenswrapper[4899]: I1003 09:42:59.768030 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5b77574dc-vm5lv_69c8959c-64e4-43a6-9b2c-133dd960fc67/barbican-worker-log/0.log" Oct 03 09:42:59 crc kubenswrapper[4899]: I1003 09:42:59.971106 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-s79hb_9ad20d0d-637a-49d9-8a83-bfae0d7c2a37/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:43:00 crc 
kubenswrapper[4899]: I1003 09:43:00.155855 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0/ceilometer-notification-agent/0.log" Oct 03 09:43:00 crc kubenswrapper[4899]: I1003 09:43:00.202455 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0/ceilometer-central-agent/0.log" Oct 03 09:43:00 crc kubenswrapper[4899]: I1003 09:43:00.230670 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0/proxy-httpd/0.log" Oct 03 09:43:00 crc kubenswrapper[4899]: I1003 09:43:00.370251 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ec33e983-7a1b-4ccd-9d45-ac1fc77a94a0/sg-core/0.log" Oct 03 09:43:00 crc kubenswrapper[4899]: I1003 09:43:00.492454 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_65edf39f-decc-476a-a5f3-b3d2d785ae67/cinder-api/0.log" Oct 03 09:43:00 crc kubenswrapper[4899]: I1003 09:43:00.611785 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_65edf39f-decc-476a-a5f3-b3d2d785ae67/cinder-api-log/0.log" Oct 03 09:43:00 crc kubenswrapper[4899]: I1003 09:43:00.760810 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_0d32bb24-0270-45bb-b242-0aa2517f1cf3/cinder-scheduler/0.log" Oct 03 09:43:00 crc kubenswrapper[4899]: I1003 09:43:00.851477 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_0d32bb24-0270-45bb-b242-0aa2517f1cf3/probe/0.log" Oct 03 09:43:00 crc kubenswrapper[4899]: I1003 09:43:00.963841 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-7l699_fef8b1ca-fe97-49df-9d53-89edfaa3d12a/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:43:01 crc kubenswrapper[4899]: I1003 09:43:01.099998 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-djftw_51abfa86-5d01-4b3e-aceb-155ded93aa49/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:43:01 crc kubenswrapper[4899]: I1003 09:43:01.290963 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-rr46f_fc6f7423-a7b0-4bd0-ac84-f65eb45233b3/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:43:01 crc kubenswrapper[4899]: I1003 09:43:01.428079 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-78c64bc9c5-hbbsx_b0cbf4db-6115-4cb0-8aa1-b773b07e37e4/init/0.log" Oct 03 09:43:01 crc kubenswrapper[4899]: I1003 09:43:01.571463 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-78c64bc9c5-hbbsx_b0cbf4db-6115-4cb0-8aa1-b773b07e37e4/init/0.log" Oct 03 09:43:01 crc kubenswrapper[4899]: I1003 09:43:01.657230 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-78c64bc9c5-hbbsx_b0cbf4db-6115-4cb0-8aa1-b773b07e37e4/dnsmasq-dns/0.log" Oct 03 09:43:01 crc kubenswrapper[4899]: I1003 09:43:01.803930 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-tskjn_521016b7-078a-42dd-bec6-739da052031b/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:43:01 crc kubenswrapper[4899]: I1003 09:43:01.882176 4899 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee/glance-httpd/0.log" Oct 03 09:43:02 crc kubenswrapper[4899]: I1003 09:43:02.018143 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_aa6b0cb8-db9d-4548-b2f5-6f21d10c6cee/glance-log/0.log" Oct 03 09:43:02 crc kubenswrapper[4899]: I1003 09:43:02.121714 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_64f5461f-c255-4b93-9d86-65321f2dc74b/glance-httpd/0.log" Oct 03 09:43:02 crc kubenswrapper[4899]: I1003 09:43:02.239507 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_64f5461f-c255-4b93-9d86-65321f2dc74b/glance-log/0.log" Oct 03 09:43:02 crc kubenswrapper[4899]: I1003 09:43:02.387551 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7f5ccd89b4-5dfm2_bf908711-a33e-40be-b5a0-c82254721d41/horizon/0.log" Oct 03 09:43:02 crc kubenswrapper[4899]: I1003 09:43:02.601140 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-hr45w_3aad51f1-e1c6-4677-a00a-e81438b9650a/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:43:02 crc kubenswrapper[4899]: I1003 09:43:02.811422 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-bxmbr_6a1cd0af-23db-4234-b97c-e57852eaa634/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:43:02 crc kubenswrapper[4899]: I1003 09:43:02.812167 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7f5ccd89b4-5dfm2_bf908711-a33e-40be-b5a0-c82254721d41/horizon-log/0.log" Oct 03 09:43:02 crc kubenswrapper[4899]: I1003 09:43:02.997601 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29324701-75rw5_dad5826d-6e25-43af-9916-de4fd15faa3a/keystone-cron/0.log" Oct 03 09:43:03 crc kubenswrapper[4899]: I1003 09:43:03.110433 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-688fdbdf8c-rnx7k_36a71770-b047-4d86-96c0-2888f9258599/keystone-api/0.log" Oct 03 09:43:03 crc kubenswrapper[4899]: I1003 09:43:03.216642 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_c05f2aa3-2568-45fa-ad1c-704870317a49/kube-state-metrics/0.log" Oct 03 09:43:03 crc kubenswrapper[4899]: I1003 09:43:03.284142 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-cb474_d9001fcb-add1-41c8-9638-097229339246/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:43:03 crc kubenswrapper[4899]: I1003 09:43:03.615182 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-cdb85d7df-9hdqn_3e425bd0-71ee-4b86-a246-e31d103a8745/neutron-api/0.log" Oct 03 09:43:03 crc kubenswrapper[4899]: I1003 09:43:03.694552 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-cdb85d7df-9hdqn_3e425bd0-71ee-4b86-a246-e31d103a8745/neutron-httpd/0.log" Oct 03 09:43:03 crc kubenswrapper[4899]: I1003 09:43:03.838598 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-6zj5l_9cf278a1-e80c-4739-9166-b75a8f6f3aea/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:43:04 crc kubenswrapper[4899]: I1003 09:43:04.426281 4899 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_76c142d4-6700-4120-bd50-aaf4e1b8d5b8/nova-api-log/0.log" Oct 03 09:43:04 crc kubenswrapper[4899]: I1003 09:43:04.623291 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_4a91feca-8e5c-489a-bd2b-222f17e9b6d6/nova-cell0-conductor-conductor/0.log" Oct 03 09:43:04 crc kubenswrapper[4899]: I1003 09:43:04.685579 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_76c142d4-6700-4120-bd50-aaf4e1b8d5b8/nova-api-api/0.log" Oct 03 09:43:05 crc kubenswrapper[4899]: I1003 09:43:05.079486 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_d2f278b3-9210-4f40-96f3-1605efa157ef/nova-cell1-conductor-conductor/0.log" Oct 03 09:43:05 crc kubenswrapper[4899]: I1003 09:43:05.167993 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_0bdcd25b-9c6e-40d6-82d4-6af348b37c1a/nova-cell1-novncproxy-novncproxy/0.log" Oct 03 09:43:05 crc kubenswrapper[4899]: I1003 09:43:05.332077 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-kmjd7_0bc97030-8da8-4cd2-8645-9962d50b08d3/nova-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:43:05 crc kubenswrapper[4899]: I1003 09:43:05.524151 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_4c14a82a-78c7-4366-a2bc-91e1f880d841/nova-metadata-log/0.log" Oct 03 09:43:06 crc kubenswrapper[4899]: I1003 09:43:06.005216 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_6969051c-bc07-454c-b958-b9e203f95ee5/nova-scheduler-scheduler/0.log" Oct 03 09:43:06 crc kubenswrapper[4899]: I1003 09:43:06.062314 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_b466ebf1-ec52-4c92-8ea9-f0f329c6ab93/mysql-bootstrap/0.log" Oct 03 09:43:06 crc kubenswrapper[4899]: I1003 09:43:06.321176 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_b466ebf1-ec52-4c92-8ea9-f0f329c6ab93/galera/0.log" Oct 03 09:43:06 crc kubenswrapper[4899]: I1003 09:43:06.325100 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_b466ebf1-ec52-4c92-8ea9-f0f329c6ab93/mysql-bootstrap/0.log" Oct 03 09:43:06 crc kubenswrapper[4899]: I1003 09:43:06.537310 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_9ec49b55-9814-4053-a0dd-eda5b7f7995a/mysql-bootstrap/0.log" Oct 03 09:43:06 crc kubenswrapper[4899]: I1003 09:43:06.543845 4899 scope.go:117] "RemoveContainer" containerID="b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba" Oct 03 09:43:06 crc kubenswrapper[4899]: E1003 09:43:06.544125 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:43:06 crc kubenswrapper[4899]: I1003 09:43:06.759749 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_9ec49b55-9814-4053-a0dd-eda5b7f7995a/mysql-bootstrap/0.log" Oct 03 09:43:06 crc 
kubenswrapper[4899]: I1003 09:43:06.839987 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_9ec49b55-9814-4053-a0dd-eda5b7f7995a/galera/0.log" Oct 03 09:43:06 crc kubenswrapper[4899]: I1003 09:43:06.872996 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_4c14a82a-78c7-4366-a2bc-91e1f880d841/nova-metadata-metadata/0.log" Oct 03 09:43:07 crc kubenswrapper[4899]: I1003 09:43:07.043204 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_1c6ed6e6-287d-4267-9cfd-b7b554691da8/openstackclient/0.log" Oct 03 09:43:07 crc kubenswrapper[4899]: I1003 09:43:07.224309 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-w992v_78c399de-31f5-439f-8f0b-24c8dba1875e/openstack-network-exporter/0.log" Oct 03 09:43:07 crc kubenswrapper[4899]: I1003 09:43:07.344551 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-c7dff_a14a6054-9c4a-414f-ab4e-b0732e33ce1c/ovsdb-server-init/0.log" Oct 03 09:43:07 crc kubenswrapper[4899]: I1003 09:43:07.725189 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-c7dff_a14a6054-9c4a-414f-ab4e-b0732e33ce1c/ovsdb-server-init/0.log" Oct 03 09:43:07 crc kubenswrapper[4899]: I1003 09:43:07.817838 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-c7dff_a14a6054-9c4a-414f-ab4e-b0732e33ce1c/ovs-vswitchd/0.log" Oct 03 09:43:08 crc kubenswrapper[4899]: I1003 09:43:08.018574 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-c7dff_a14a6054-9c4a-414f-ab4e-b0732e33ce1c/ovsdb-server/0.log" Oct 03 09:43:08 crc kubenswrapper[4899]: I1003 09:43:08.199857 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-wfz45_89364578-24ad-4c19-8e0b-ba123f58f4eb/ovn-controller/0.log" Oct 03 09:43:08 crc kubenswrapper[4899]: I1003 09:43:08.411601 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-2swmz_2d954cfa-e3a2-4fc0-a1af-61234475db07/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:43:08 crc kubenswrapper[4899]: I1003 09:43:08.626871 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_71e79cb5-28f4-4102-892e-479502ff4db9/openstack-network-exporter/0.log" Oct 03 09:43:08 crc kubenswrapper[4899]: I1003 09:43:08.757010 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_71e79cb5-28f4-4102-892e-479502ff4db9/ovn-northd/0.log" Oct 03 09:43:08 crc kubenswrapper[4899]: I1003 09:43:08.910668 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_6ad382d6-f0b8-43b2-aeea-98ace59fb6cf/openstack-network-exporter/0.log" Oct 03 09:43:08 crc kubenswrapper[4899]: I1003 09:43:08.959805 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_6ad382d6-f0b8-43b2-aeea-98ace59fb6cf/ovsdbserver-nb/0.log" Oct 03 09:43:09 crc kubenswrapper[4899]: I1003 09:43:09.203298 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_3e11e1a1-7b97-4717-85bc-834b214d4526/openstack-network-exporter/0.log" Oct 03 09:43:09 crc kubenswrapper[4899]: I1003 09:43:09.251182 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_3e11e1a1-7b97-4717-85bc-834b214d4526/ovsdbserver-sb/0.log" Oct 03 09:43:09 crc 
kubenswrapper[4899]: I1003 09:43:09.452045 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-568fd9848b-bw6ch_73b0bcea-efbe-4c62-b97c-031ea8fee918/placement-api/0.log" Oct 03 09:43:09 crc kubenswrapper[4899]: I1003 09:43:09.612740 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-568fd9848b-bw6ch_73b0bcea-efbe-4c62-b97c-031ea8fee918/placement-log/0.log" Oct 03 09:43:09 crc kubenswrapper[4899]: I1003 09:43:09.695602 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_75734f37-27af-4b79-ac28-4546a092e218/setup-container/0.log" Oct 03 09:43:09 crc kubenswrapper[4899]: I1003 09:43:09.884415 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_75734f37-27af-4b79-ac28-4546a092e218/rabbitmq/0.log" Oct 03 09:43:09 crc kubenswrapper[4899]: I1003 09:43:09.952630 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_75734f37-27af-4b79-ac28-4546a092e218/setup-container/0.log" Oct 03 09:43:10 crc kubenswrapper[4899]: I1003 09:43:10.096583 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7415d874-aa51-4fc4-8b40-b487392c248c/setup-container/0.log" Oct 03 09:43:10 crc kubenswrapper[4899]: I1003 09:43:10.332527 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7415d874-aa51-4fc4-8b40-b487392c248c/setup-container/0.log" Oct 03 09:43:10 crc kubenswrapper[4899]: I1003 09:43:10.347236 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7415d874-aa51-4fc4-8b40-b487392c248c/rabbitmq/0.log" Oct 03 09:43:10 crc kubenswrapper[4899]: I1003 09:43:10.592186 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-df25h_23483d3a-dd9e-4fcd-81a6-465936a69838/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:43:10 crc kubenswrapper[4899]: I1003 09:43:10.666113 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-9rwqd_f5dcd890-e7fa-4739-919b-6b1b77ff741a/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:43:10 crc kubenswrapper[4899]: I1003 09:43:10.876293 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-b8kfp_b5ae39e5-0bf8-4627-8e97-7162c0861524/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:43:11 crc kubenswrapper[4899]: I1003 09:43:11.094547 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-t58bp_ec00fd0d-26af-42f1-afdd-a21d668719d5/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:43:11 crc kubenswrapper[4899]: I1003 09:43:11.201646 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-4qh2m_f95e0dc0-dc93-4ec2-ae14-96f3641e651a/ssh-known-hosts-edpm-deployment/0.log" Oct 03 09:43:11 crc kubenswrapper[4899]: I1003 09:43:11.460265 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-54794d7d5c-64vlg_088c667d-5a03-44d1-a2fc-c9de7910e5a8/proxy-server/0.log" Oct 03 09:43:11 crc kubenswrapper[4899]: I1003 09:43:11.484379 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-54794d7d5c-64vlg_088c667d-5a03-44d1-a2fc-c9de7910e5a8/proxy-httpd/0.log" Oct 03 09:43:11 crc 
kubenswrapper[4899]: I1003 09:43:11.677368 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-s5xff_79a7b2d1-b9f8-4637-b1b2-1bfc4b5739ca/swift-ring-rebalance/0.log" Oct 03 09:43:11 crc kubenswrapper[4899]: I1003 09:43:11.828702 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/account-auditor/0.log" Oct 03 09:43:11 crc kubenswrapper[4899]: I1003 09:43:11.892429 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/account-reaper/0.log" Oct 03 09:43:11 crc kubenswrapper[4899]: I1003 09:43:11.998944 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/account-replicator/0.log" Oct 03 09:43:12 crc kubenswrapper[4899]: I1003 09:43:12.044112 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/account-server/0.log" Oct 03 09:43:12 crc kubenswrapper[4899]: I1003 09:43:12.084375 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/container-auditor/0.log" Oct 03 09:43:12 crc kubenswrapper[4899]: I1003 09:43:12.240372 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/container-server/0.log" Oct 03 09:43:12 crc kubenswrapper[4899]: I1003 09:43:12.241797 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/container-replicator/0.log" Oct 03 09:43:12 crc kubenswrapper[4899]: I1003 09:43:12.304364 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/container-updater/0.log" Oct 03 09:43:12 crc kubenswrapper[4899]: I1003 09:43:12.497464 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/object-expirer/0.log" Oct 03 09:43:12 crc kubenswrapper[4899]: I1003 09:43:12.513747 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/object-auditor/0.log" Oct 03 09:43:12 crc kubenswrapper[4899]: I1003 09:43:12.569539 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/object-replicator/0.log" Oct 03 09:43:12 crc kubenswrapper[4899]: I1003 09:43:12.698303 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/object-server/0.log" Oct 03 09:43:12 crc kubenswrapper[4899]: I1003 09:43:12.699723 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/object-updater/0.log" Oct 03 09:43:12 crc kubenswrapper[4899]: I1003 09:43:12.798291 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/rsync/0.log" Oct 03 09:43:12 crc kubenswrapper[4899]: I1003 09:43:12.913107 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a9725bfb-2b4a-49d7-b4d8-c2235583f28f/swift-recon-cron/0.log" Oct 03 09:43:13 crc kubenswrapper[4899]: I1003 09:43:13.052564 4899 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-kw265_78460eaf-b283-4155-be7c-57230376bbcc/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:43:13 crc kubenswrapper[4899]: I1003 09:43:13.241599 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_39b95e3c-c5c3-44c9-a89f-490bcde4fc69/tempest-tests-tempest-tests-runner/0.log" Oct 03 09:43:13 crc kubenswrapper[4899]: I1003 09:43:13.477996 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_56098c86-7f51-46a3-9838-dfa07d174475/test-operator-logs-container/0.log" Oct 03 09:43:13 crc kubenswrapper[4899]: I1003 09:43:13.666559 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-j9q4k_fea4a390-4920-4967-9e2a-152d46f212a3/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 09:43:14 crc kubenswrapper[4899]: I1003 09:43:14.374794 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5djj7"] Oct 03 09:43:14 crc kubenswrapper[4899]: I1003 09:43:14.377518 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5djj7" Oct 03 09:43:14 crc kubenswrapper[4899]: I1003 09:43:14.410043 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5djj7"] Oct 03 09:43:14 crc kubenswrapper[4899]: I1003 09:43:14.575490 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67-utilities\") pod \"redhat-marketplace-5djj7\" (UID: \"bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67\") " pod="openshift-marketplace/redhat-marketplace-5djj7" Oct 03 09:43:14 crc kubenswrapper[4899]: I1003 09:43:14.575922 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7zhc\" (UniqueName: \"kubernetes.io/projected/bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67-kube-api-access-v7zhc\") pod \"redhat-marketplace-5djj7\" (UID: \"bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67\") " pod="openshift-marketplace/redhat-marketplace-5djj7" Oct 03 09:43:14 crc kubenswrapper[4899]: I1003 09:43:14.576025 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67-catalog-content\") pod \"redhat-marketplace-5djj7\" (UID: \"bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67\") " pod="openshift-marketplace/redhat-marketplace-5djj7" Oct 03 09:43:14 crc kubenswrapper[4899]: I1003 09:43:14.677498 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67-utilities\") pod \"redhat-marketplace-5djj7\" (UID: \"bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67\") " pod="openshift-marketplace/redhat-marketplace-5djj7" Oct 03 09:43:14 crc kubenswrapper[4899]: I1003 09:43:14.678323 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7zhc\" (UniqueName: \"kubernetes.io/projected/bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67-kube-api-access-v7zhc\") pod \"redhat-marketplace-5djj7\" (UID: \"bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67\") " pod="openshift-marketplace/redhat-marketplace-5djj7" Oct 03 09:43:14 crc 
kubenswrapper[4899]: I1003 09:43:14.678550 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67-catalog-content\") pod \"redhat-marketplace-5djj7\" (UID: \"bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67\") " pod="openshift-marketplace/redhat-marketplace-5djj7" Oct 03 09:43:14 crc kubenswrapper[4899]: I1003 09:43:14.678043 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67-utilities\") pod \"redhat-marketplace-5djj7\" (UID: \"bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67\") " pod="openshift-marketplace/redhat-marketplace-5djj7" Oct 03 09:43:14 crc kubenswrapper[4899]: I1003 09:43:14.678800 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67-catalog-content\") pod \"redhat-marketplace-5djj7\" (UID: \"bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67\") " pod="openshift-marketplace/redhat-marketplace-5djj7" Oct 03 09:43:14 crc kubenswrapper[4899]: I1003 09:43:14.705683 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7zhc\" (UniqueName: \"kubernetes.io/projected/bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67-kube-api-access-v7zhc\") pod \"redhat-marketplace-5djj7\" (UID: \"bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67\") " pod="openshift-marketplace/redhat-marketplace-5djj7" Oct 03 09:43:14 crc kubenswrapper[4899]: I1003 09:43:14.707846 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5djj7" Oct 03 09:43:15 crc kubenswrapper[4899]: I1003 09:43:15.282248 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5djj7"] Oct 03 09:43:15 crc kubenswrapper[4899]: I1003 09:43:15.432099 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5djj7" event={"ID":"bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67","Type":"ContainerStarted","Data":"40dc331138c95bd2131e8c22a77beec4d9ff3c27b3fa3976fb7b8feb77bed831"} Oct 03 09:43:16 crc kubenswrapper[4899]: I1003 09:43:16.446087 4899 generic.go:334] "Generic (PLEG): container finished" podID="bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67" containerID="b1c6547a29b6b3e07bb61874df48f651478cf4dcec6a3604ae826c42c1f3fbd8" exitCode=0 Oct 03 09:43:16 crc kubenswrapper[4899]: I1003 09:43:16.446402 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5djj7" event={"ID":"bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67","Type":"ContainerDied","Data":"b1c6547a29b6b3e07bb61874df48f651478cf4dcec6a3604ae826c42c1f3fbd8"} Oct 03 09:43:16 crc kubenswrapper[4899]: I1003 09:43:16.449388 4899 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 09:43:17 crc kubenswrapper[4899]: I1003 09:43:17.471657 4899 generic.go:334] "Generic (PLEG): container finished" podID="bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67" containerID="97ea7ff9c65a2cca9e4432b827b5bb80ae12ac0f02c9cdda0bad40b9dddef227" exitCode=0 Oct 03 09:43:17 crc kubenswrapper[4899]: I1003 09:43:17.472270 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5djj7" event={"ID":"bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67","Type":"ContainerDied","Data":"97ea7ff9c65a2cca9e4432b827b5bb80ae12ac0f02c9cdda0bad40b9dddef227"} Oct 03 09:43:18 
crc kubenswrapper[4899]: I1003 09:43:18.551421 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5djj7" event={"ID":"bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67","Type":"ContainerStarted","Data":"dc79264013150e01a18410758bde2777a7b7831456cb22d50a3a919d6d74e0ff"} Oct 03 09:43:18 crc kubenswrapper[4899]: I1003 09:43:18.568119 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5djj7" podStartSLOduration=2.980312848 podStartE2EDuration="4.568101766s" podCreationTimestamp="2025-10-03 09:43:14 +0000 UTC" firstStartedPulling="2025-10-03 09:43:16.449150881 +0000 UTC m=+3770.556635824" lastFinishedPulling="2025-10-03 09:43:18.036939789 +0000 UTC m=+3772.144424742" observedRunningTime="2025-10-03 09:43:18.564457811 +0000 UTC m=+3772.671942764" watchObservedRunningTime="2025-10-03 09:43:18.568101766 +0000 UTC m=+3772.675586719" Oct 03 09:43:20 crc kubenswrapper[4899]: I1003 09:43:20.526932 4899 scope.go:117] "RemoveContainer" containerID="b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba" Oct 03 09:43:20 crc kubenswrapper[4899]: E1003 09:43:20.527402 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:43:21 crc kubenswrapper[4899]: I1003 09:43:21.581336 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_231e3958-c17a-4f0b-a83e-4801b497b942/memcached/0.log" Oct 03 09:43:24 crc kubenswrapper[4899]: I1003 09:43:24.708996 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5djj7" Oct 03 09:43:24 crc kubenswrapper[4899]: I1003 09:43:24.709564 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5djj7" Oct 03 09:43:24 crc kubenswrapper[4899]: I1003 09:43:24.761579 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5djj7" Oct 03 09:43:25 crc kubenswrapper[4899]: I1003 09:43:25.668595 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5djj7" Oct 03 09:43:25 crc kubenswrapper[4899]: I1003 09:43:25.717575 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5djj7"] Oct 03 09:43:27 crc kubenswrapper[4899]: I1003 09:43:27.635727 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5djj7" podUID="bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67" containerName="registry-server" containerID="cri-o://dc79264013150e01a18410758bde2777a7b7831456cb22d50a3a919d6d74e0ff" gracePeriod=2 Oct 03 09:43:28 crc kubenswrapper[4899]: I1003 09:43:28.131614 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5djj7" Oct 03 09:43:28 crc kubenswrapper[4899]: I1003 09:43:28.243635 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v7zhc\" (UniqueName: \"kubernetes.io/projected/bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67-kube-api-access-v7zhc\") pod \"bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67\" (UID: \"bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67\") " Oct 03 09:43:28 crc kubenswrapper[4899]: I1003 09:43:28.243740 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67-utilities\") pod \"bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67\" (UID: \"bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67\") " Oct 03 09:43:28 crc kubenswrapper[4899]: I1003 09:43:28.243832 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67-catalog-content\") pod \"bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67\" (UID: \"bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67\") " Oct 03 09:43:28 crc kubenswrapper[4899]: I1003 09:43:28.245749 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67-utilities" (OuterVolumeSpecName: "utilities") pod "bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67" (UID: "bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:43:28 crc kubenswrapper[4899]: I1003 09:43:28.259455 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67" (UID: "bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:43:28 crc kubenswrapper[4899]: I1003 09:43:28.267470 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67-kube-api-access-v7zhc" (OuterVolumeSpecName: "kube-api-access-v7zhc") pod "bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67" (UID: "bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67"). InnerVolumeSpecName "kube-api-access-v7zhc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:43:28 crc kubenswrapper[4899]: I1003 09:43:28.346986 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v7zhc\" (UniqueName: \"kubernetes.io/projected/bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67-kube-api-access-v7zhc\") on node \"crc\" DevicePath \"\"" Oct 03 09:43:28 crc kubenswrapper[4899]: I1003 09:43:28.347039 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 09:43:28 crc kubenswrapper[4899]: I1003 09:43:28.347050 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 09:43:28 crc kubenswrapper[4899]: I1003 09:43:28.647167 4899 generic.go:334] "Generic (PLEG): container finished" podID="bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67" containerID="dc79264013150e01a18410758bde2777a7b7831456cb22d50a3a919d6d74e0ff" exitCode=0 Oct 03 09:43:28 crc kubenswrapper[4899]: I1003 09:43:28.647231 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5djj7" Oct 03 09:43:28 crc kubenswrapper[4899]: I1003 09:43:28.647233 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5djj7" event={"ID":"bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67","Type":"ContainerDied","Data":"dc79264013150e01a18410758bde2777a7b7831456cb22d50a3a919d6d74e0ff"} Oct 03 09:43:28 crc kubenswrapper[4899]: I1003 09:43:28.648643 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5djj7" event={"ID":"bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67","Type":"ContainerDied","Data":"40dc331138c95bd2131e8c22a77beec4d9ff3c27b3fa3976fb7b8feb77bed831"} Oct 03 09:43:28 crc kubenswrapper[4899]: I1003 09:43:28.648663 4899 scope.go:117] "RemoveContainer" containerID="dc79264013150e01a18410758bde2777a7b7831456cb22d50a3a919d6d74e0ff" Oct 03 09:43:28 crc kubenswrapper[4899]: I1003 09:43:28.672064 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5djj7"] Oct 03 09:43:28 crc kubenswrapper[4899]: I1003 09:43:28.679984 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5djj7"] Oct 03 09:43:28 crc kubenswrapper[4899]: I1003 09:43:28.684135 4899 scope.go:117] "RemoveContainer" containerID="97ea7ff9c65a2cca9e4432b827b5bb80ae12ac0f02c9cdda0bad40b9dddef227" Oct 03 09:43:28 crc kubenswrapper[4899]: I1003 09:43:28.715201 4899 scope.go:117] "RemoveContainer" containerID="b1c6547a29b6b3e07bb61874df48f651478cf4dcec6a3604ae826c42c1f3fbd8" Oct 03 09:43:28 crc kubenswrapper[4899]: I1003 09:43:28.757759 4899 scope.go:117] "RemoveContainer" containerID="dc79264013150e01a18410758bde2777a7b7831456cb22d50a3a919d6d74e0ff" Oct 03 09:43:28 crc kubenswrapper[4899]: E1003 09:43:28.758672 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc79264013150e01a18410758bde2777a7b7831456cb22d50a3a919d6d74e0ff\": container with ID starting with dc79264013150e01a18410758bde2777a7b7831456cb22d50a3a919d6d74e0ff not found: ID does not exist" containerID="dc79264013150e01a18410758bde2777a7b7831456cb22d50a3a919d6d74e0ff" Oct 03 09:43:28 crc kubenswrapper[4899]: I1003 09:43:28.758723 4899 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc79264013150e01a18410758bde2777a7b7831456cb22d50a3a919d6d74e0ff"} err="failed to get container status \"dc79264013150e01a18410758bde2777a7b7831456cb22d50a3a919d6d74e0ff\": rpc error: code = NotFound desc = could not find container \"dc79264013150e01a18410758bde2777a7b7831456cb22d50a3a919d6d74e0ff\": container with ID starting with dc79264013150e01a18410758bde2777a7b7831456cb22d50a3a919d6d74e0ff not found: ID does not exist" Oct 03 09:43:28 crc kubenswrapper[4899]: I1003 09:43:28.758757 4899 scope.go:117] "RemoveContainer" containerID="97ea7ff9c65a2cca9e4432b827b5bb80ae12ac0f02c9cdda0bad40b9dddef227" Oct 03 09:43:28 crc kubenswrapper[4899]: E1003 09:43:28.762393 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97ea7ff9c65a2cca9e4432b827b5bb80ae12ac0f02c9cdda0bad40b9dddef227\": container with ID starting with 97ea7ff9c65a2cca9e4432b827b5bb80ae12ac0f02c9cdda0bad40b9dddef227 not found: ID does not exist" containerID="97ea7ff9c65a2cca9e4432b827b5bb80ae12ac0f02c9cdda0bad40b9dddef227" Oct 03 09:43:28 crc kubenswrapper[4899]: I1003 09:43:28.762437 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97ea7ff9c65a2cca9e4432b827b5bb80ae12ac0f02c9cdda0bad40b9dddef227"} err="failed to get container status \"97ea7ff9c65a2cca9e4432b827b5bb80ae12ac0f02c9cdda0bad40b9dddef227\": rpc error: code = NotFound desc = could not find container \"97ea7ff9c65a2cca9e4432b827b5bb80ae12ac0f02c9cdda0bad40b9dddef227\": container with ID starting with 97ea7ff9c65a2cca9e4432b827b5bb80ae12ac0f02c9cdda0bad40b9dddef227 not found: ID does not exist" Oct 03 09:43:28 crc kubenswrapper[4899]: I1003 09:43:28.762463 4899 scope.go:117] "RemoveContainer" containerID="b1c6547a29b6b3e07bb61874df48f651478cf4dcec6a3604ae826c42c1f3fbd8" Oct 03 09:43:28 crc kubenswrapper[4899]: E1003 09:43:28.762937 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1c6547a29b6b3e07bb61874df48f651478cf4dcec6a3604ae826c42c1f3fbd8\": container with ID starting with b1c6547a29b6b3e07bb61874df48f651478cf4dcec6a3604ae826c42c1f3fbd8 not found: ID does not exist" containerID="b1c6547a29b6b3e07bb61874df48f651478cf4dcec6a3604ae826c42c1f3fbd8" Oct 03 09:43:28 crc kubenswrapper[4899]: I1003 09:43:28.762996 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1c6547a29b6b3e07bb61874df48f651478cf4dcec6a3604ae826c42c1f3fbd8"} err="failed to get container status \"b1c6547a29b6b3e07bb61874df48f651478cf4dcec6a3604ae826c42c1f3fbd8\": rpc error: code = NotFound desc = could not find container \"b1c6547a29b6b3e07bb61874df48f651478cf4dcec6a3604ae826c42c1f3fbd8\": container with ID starting with b1c6547a29b6b3e07bb61874df48f651478cf4dcec6a3604ae826c42c1f3fbd8 not found: ID does not exist" Oct 03 09:43:30 crc kubenswrapper[4899]: I1003 09:43:30.537375 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67" path="/var/lib/kubelet/pods/bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67/volumes" Oct 03 09:43:31 crc kubenswrapper[4899]: I1003 09:43:31.527083 4899 scope.go:117] "RemoveContainer" containerID="b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba" Oct 03 09:43:31 crc kubenswrapper[4899]: E1003 09:43:31.527406 4899 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:43:43 crc kubenswrapper[4899]: I1003 09:43:43.527115 4899 scope.go:117] "RemoveContainer" containerID="b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba" Oct 03 09:43:43 crc kubenswrapper[4899]: E1003 09:43:43.528016 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:43:49 crc kubenswrapper[4899]: I1003 09:43:49.826283 4899 generic.go:334] "Generic (PLEG): container finished" podID="4b0e8903-c369-4e44-8003-6e83352ede7e" containerID="29ca2859256c29e15c87f58d41594e0030e2a4a07c8f64f419b405bd020e8405" exitCode=0 Oct 03 09:43:49 crc kubenswrapper[4899]: I1003 09:43:49.826372 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-b8z59/crc-debug-rjgjh" event={"ID":"4b0e8903-c369-4e44-8003-6e83352ede7e","Type":"ContainerDied","Data":"29ca2859256c29e15c87f58d41594e0030e2a4a07c8f64f419b405bd020e8405"} Oct 03 09:43:50 crc kubenswrapper[4899]: I1003 09:43:50.932029 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-b8z59/crc-debug-rjgjh" Oct 03 09:43:50 crc kubenswrapper[4899]: I1003 09:43:50.970166 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-b8z59/crc-debug-rjgjh"] Oct 03 09:43:50 crc kubenswrapper[4899]: I1003 09:43:50.980940 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-b8z59/crc-debug-rjgjh"] Oct 03 09:43:50 crc kubenswrapper[4899]: I1003 09:43:50.984649 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mmlpm\" (UniqueName: \"kubernetes.io/projected/4b0e8903-c369-4e44-8003-6e83352ede7e-kube-api-access-mmlpm\") pod \"4b0e8903-c369-4e44-8003-6e83352ede7e\" (UID: \"4b0e8903-c369-4e44-8003-6e83352ede7e\") " Oct 03 09:43:50 crc kubenswrapper[4899]: I1003 09:43:50.984708 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4b0e8903-c369-4e44-8003-6e83352ede7e-host\") pod \"4b0e8903-c369-4e44-8003-6e83352ede7e\" (UID: \"4b0e8903-c369-4e44-8003-6e83352ede7e\") " Oct 03 09:43:50 crc kubenswrapper[4899]: I1003 09:43:50.984943 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4b0e8903-c369-4e44-8003-6e83352ede7e-host" (OuterVolumeSpecName: "host") pod "4b0e8903-c369-4e44-8003-6e83352ede7e" (UID: "4b0e8903-c369-4e44-8003-6e83352ede7e"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 09:43:50 crc kubenswrapper[4899]: I1003 09:43:50.985451 4899 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4b0e8903-c369-4e44-8003-6e83352ede7e-host\") on node \"crc\" DevicePath \"\"" Oct 03 09:43:50 crc kubenswrapper[4899]: I1003 09:43:50.991033 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b0e8903-c369-4e44-8003-6e83352ede7e-kube-api-access-mmlpm" (OuterVolumeSpecName: "kube-api-access-mmlpm") pod "4b0e8903-c369-4e44-8003-6e83352ede7e" (UID: "4b0e8903-c369-4e44-8003-6e83352ede7e"). InnerVolumeSpecName "kube-api-access-mmlpm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:43:51 crc kubenswrapper[4899]: I1003 09:43:51.087696 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mmlpm\" (UniqueName: \"kubernetes.io/projected/4b0e8903-c369-4e44-8003-6e83352ede7e-kube-api-access-mmlpm\") on node \"crc\" DevicePath \"\"" Oct 03 09:43:51 crc kubenswrapper[4899]: I1003 09:43:51.845702 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1c457385a3754b4030ee3c56fc4b62475d061516796b8da8b9e46f1912ad88cc" Oct 03 09:43:51 crc kubenswrapper[4899]: I1003 09:43:51.845778 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-b8z59/crc-debug-rjgjh" Oct 03 09:43:52 crc kubenswrapper[4899]: I1003 09:43:52.110650 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-b8z59/crc-debug-8czv9"] Oct 03 09:43:52 crc kubenswrapper[4899]: E1003 09:43:52.112118 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b0e8903-c369-4e44-8003-6e83352ede7e" containerName="container-00" Oct 03 09:43:52 crc kubenswrapper[4899]: I1003 09:43:52.112203 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b0e8903-c369-4e44-8003-6e83352ede7e" containerName="container-00" Oct 03 09:43:52 crc kubenswrapper[4899]: E1003 09:43:52.112280 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67" containerName="registry-server" Oct 03 09:43:52 crc kubenswrapper[4899]: I1003 09:43:52.112343 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67" containerName="registry-server" Oct 03 09:43:52 crc kubenswrapper[4899]: E1003 09:43:52.112405 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67" containerName="extract-content" Oct 03 09:43:52 crc kubenswrapper[4899]: I1003 09:43:52.112468 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67" containerName="extract-content" Oct 03 09:43:52 crc kubenswrapper[4899]: E1003 09:43:52.112532 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67" containerName="extract-utilities" Oct 03 09:43:52 crc kubenswrapper[4899]: I1003 09:43:52.112588 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67" containerName="extract-utilities" Oct 03 09:43:52 crc kubenswrapper[4899]: I1003 09:43:52.112858 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b0e8903-c369-4e44-8003-6e83352ede7e" containerName="container-00" Oct 03 09:43:52 crc kubenswrapper[4899]: I1003 09:43:52.112967 4899 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="bd7fe77a-d8ea-4883-87e4-7d69c2a4dd67" containerName="registry-server" Oct 03 09:43:52 crc kubenswrapper[4899]: I1003 09:43:52.113659 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-b8z59/crc-debug-8czv9" Oct 03 09:43:52 crc kubenswrapper[4899]: I1003 09:43:52.208498 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/65fe1882-2a1f-4284-8f5d-c07611648069-host\") pod \"crc-debug-8czv9\" (UID: \"65fe1882-2a1f-4284-8f5d-c07611648069\") " pod="openshift-must-gather-b8z59/crc-debug-8czv9" Oct 03 09:43:52 crc kubenswrapper[4899]: I1003 09:43:52.208932 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r24v4\" (UniqueName: \"kubernetes.io/projected/65fe1882-2a1f-4284-8f5d-c07611648069-kube-api-access-r24v4\") pod \"crc-debug-8czv9\" (UID: \"65fe1882-2a1f-4284-8f5d-c07611648069\") " pod="openshift-must-gather-b8z59/crc-debug-8czv9" Oct 03 09:43:52 crc kubenswrapper[4899]: I1003 09:43:52.310587 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/65fe1882-2a1f-4284-8f5d-c07611648069-host\") pod \"crc-debug-8czv9\" (UID: \"65fe1882-2a1f-4284-8f5d-c07611648069\") " pod="openshift-must-gather-b8z59/crc-debug-8czv9" Oct 03 09:43:52 crc kubenswrapper[4899]: I1003 09:43:52.310616 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/65fe1882-2a1f-4284-8f5d-c07611648069-host\") pod \"crc-debug-8czv9\" (UID: \"65fe1882-2a1f-4284-8f5d-c07611648069\") " pod="openshift-must-gather-b8z59/crc-debug-8czv9" Oct 03 09:43:52 crc kubenswrapper[4899]: I1003 09:43:52.310884 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r24v4\" (UniqueName: \"kubernetes.io/projected/65fe1882-2a1f-4284-8f5d-c07611648069-kube-api-access-r24v4\") pod \"crc-debug-8czv9\" (UID: \"65fe1882-2a1f-4284-8f5d-c07611648069\") " pod="openshift-must-gather-b8z59/crc-debug-8czv9" Oct 03 09:43:52 crc kubenswrapper[4899]: I1003 09:43:52.328817 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r24v4\" (UniqueName: \"kubernetes.io/projected/65fe1882-2a1f-4284-8f5d-c07611648069-kube-api-access-r24v4\") pod \"crc-debug-8czv9\" (UID: \"65fe1882-2a1f-4284-8f5d-c07611648069\") " pod="openshift-must-gather-b8z59/crc-debug-8czv9" Oct 03 09:43:52 crc kubenswrapper[4899]: I1003 09:43:52.432549 4899 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-b8z59/crc-debug-8czv9" Oct 03 09:43:52 crc kubenswrapper[4899]: I1003 09:43:52.539085 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b0e8903-c369-4e44-8003-6e83352ede7e" path="/var/lib/kubelet/pods/4b0e8903-c369-4e44-8003-6e83352ede7e/volumes" Oct 03 09:43:52 crc kubenswrapper[4899]: I1003 09:43:52.858860 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-b8z59/crc-debug-8czv9" event={"ID":"65fe1882-2a1f-4284-8f5d-c07611648069","Type":"ContainerStarted","Data":"d398d109f74ca9b7fda318f9b521b17ec130b92a6b45b4a0cc4cdb35f17ea08a"} Oct 03 09:43:52 crc kubenswrapper[4899]: I1003 09:43:52.858917 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-b8z59/crc-debug-8czv9" event={"ID":"65fe1882-2a1f-4284-8f5d-c07611648069","Type":"ContainerStarted","Data":"c77fa477b6a431b7522c4ac5553f3296567eff8ea2ddf3d3953ae870b24813e1"} Oct 03 09:43:52 crc kubenswrapper[4899]: I1003 09:43:52.878523 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-b8z59/crc-debug-8czv9" podStartSLOduration=0.878503935 podStartE2EDuration="878.503935ms" podCreationTimestamp="2025-10-03 09:43:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 09:43:52.871940858 +0000 UTC m=+3806.979425811" watchObservedRunningTime="2025-10-03 09:43:52.878503935 +0000 UTC m=+3806.985988888" Oct 03 09:43:53 crc kubenswrapper[4899]: I1003 09:43:53.881410 4899 generic.go:334] "Generic (PLEG): container finished" podID="65fe1882-2a1f-4284-8f5d-c07611648069" containerID="d398d109f74ca9b7fda318f9b521b17ec130b92a6b45b4a0cc4cdb35f17ea08a" exitCode=0 Oct 03 09:43:53 crc kubenswrapper[4899]: I1003 09:43:53.881460 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-b8z59/crc-debug-8czv9" event={"ID":"65fe1882-2a1f-4284-8f5d-c07611648069","Type":"ContainerDied","Data":"d398d109f74ca9b7fda318f9b521b17ec130b92a6b45b4a0cc4cdb35f17ea08a"} Oct 03 09:43:54 crc kubenswrapper[4899]: I1003 09:43:54.998104 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-b8z59/crc-debug-8czv9" Oct 03 09:43:55 crc kubenswrapper[4899]: I1003 09:43:55.055951 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r24v4\" (UniqueName: \"kubernetes.io/projected/65fe1882-2a1f-4284-8f5d-c07611648069-kube-api-access-r24v4\") pod \"65fe1882-2a1f-4284-8f5d-c07611648069\" (UID: \"65fe1882-2a1f-4284-8f5d-c07611648069\") " Oct 03 09:43:55 crc kubenswrapper[4899]: I1003 09:43:55.056163 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/65fe1882-2a1f-4284-8f5d-c07611648069-host\") pod \"65fe1882-2a1f-4284-8f5d-c07611648069\" (UID: \"65fe1882-2a1f-4284-8f5d-c07611648069\") " Oct 03 09:43:55 crc kubenswrapper[4899]: I1003 09:43:55.056685 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65fe1882-2a1f-4284-8f5d-c07611648069-host" (OuterVolumeSpecName: "host") pod "65fe1882-2a1f-4284-8f5d-c07611648069" (UID: "65fe1882-2a1f-4284-8f5d-c07611648069"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 09:43:55 crc kubenswrapper[4899]: I1003 09:43:55.066831 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65fe1882-2a1f-4284-8f5d-c07611648069-kube-api-access-r24v4" (OuterVolumeSpecName: "kube-api-access-r24v4") pod "65fe1882-2a1f-4284-8f5d-c07611648069" (UID: "65fe1882-2a1f-4284-8f5d-c07611648069"). InnerVolumeSpecName "kube-api-access-r24v4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:43:55 crc kubenswrapper[4899]: I1003 09:43:55.158008 4899 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/65fe1882-2a1f-4284-8f5d-c07611648069-host\") on node \"crc\" DevicePath \"\"" Oct 03 09:43:55 crc kubenswrapper[4899]: I1003 09:43:55.158420 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r24v4\" (UniqueName: \"kubernetes.io/projected/65fe1882-2a1f-4284-8f5d-c07611648069-kube-api-access-r24v4\") on node \"crc\" DevicePath \"\"" Oct 03 09:43:55 crc kubenswrapper[4899]: I1003 09:43:55.526604 4899 scope.go:117] "RemoveContainer" containerID="b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba" Oct 03 09:43:55 crc kubenswrapper[4899]: E1003 09:43:55.526988 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:43:55 crc kubenswrapper[4899]: I1003 09:43:55.898385 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-b8z59/crc-debug-8czv9" event={"ID":"65fe1882-2a1f-4284-8f5d-c07611648069","Type":"ContainerDied","Data":"c77fa477b6a431b7522c4ac5553f3296567eff8ea2ddf3d3953ae870b24813e1"} Oct 03 09:43:55 crc kubenswrapper[4899]: I1003 09:43:55.898436 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c77fa477b6a431b7522c4ac5553f3296567eff8ea2ddf3d3953ae870b24813e1" Oct 03 09:43:55 crc kubenswrapper[4899]: I1003 09:43:55.898515 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-b8z59/crc-debug-8czv9" Oct 03 09:43:59 crc kubenswrapper[4899]: I1003 09:43:59.932465 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-b8z59/crc-debug-8czv9"] Oct 03 09:43:59 crc kubenswrapper[4899]: I1003 09:43:59.941167 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-b8z59/crc-debug-8czv9"] Oct 03 09:44:00 crc kubenswrapper[4899]: I1003 09:44:00.538340 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65fe1882-2a1f-4284-8f5d-c07611648069" path="/var/lib/kubelet/pods/65fe1882-2a1f-4284-8f5d-c07611648069/volumes" Oct 03 09:44:01 crc kubenswrapper[4899]: I1003 09:44:01.085630 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-b8z59/crc-debug-mn72q"] Oct 03 09:44:01 crc kubenswrapper[4899]: E1003 09:44:01.086155 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65fe1882-2a1f-4284-8f5d-c07611648069" containerName="container-00" Oct 03 09:44:01 crc kubenswrapper[4899]: I1003 09:44:01.086172 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="65fe1882-2a1f-4284-8f5d-c07611648069" containerName="container-00" Oct 03 09:44:01 crc kubenswrapper[4899]: I1003 09:44:01.086408 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="65fe1882-2a1f-4284-8f5d-c07611648069" containerName="container-00" Oct 03 09:44:01 crc kubenswrapper[4899]: I1003 09:44:01.087061 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-b8z59/crc-debug-mn72q" Oct 03 09:44:01 crc kubenswrapper[4899]: I1003 09:44:01.186584 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a9ebb961-19a4-4714-b6b9-d56cd919fb6a-host\") pod \"crc-debug-mn72q\" (UID: \"a9ebb961-19a4-4714-b6b9-d56cd919fb6a\") " pod="openshift-must-gather-b8z59/crc-debug-mn72q" Oct 03 09:44:01 crc kubenswrapper[4899]: I1003 09:44:01.186643 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87fzp\" (UniqueName: \"kubernetes.io/projected/a9ebb961-19a4-4714-b6b9-d56cd919fb6a-kube-api-access-87fzp\") pod \"crc-debug-mn72q\" (UID: \"a9ebb961-19a4-4714-b6b9-d56cd919fb6a\") " pod="openshift-must-gather-b8z59/crc-debug-mn72q" Oct 03 09:44:01 crc kubenswrapper[4899]: I1003 09:44:01.288960 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a9ebb961-19a4-4714-b6b9-d56cd919fb6a-host\") pod \"crc-debug-mn72q\" (UID: \"a9ebb961-19a4-4714-b6b9-d56cd919fb6a\") " pod="openshift-must-gather-b8z59/crc-debug-mn72q" Oct 03 09:44:01 crc kubenswrapper[4899]: I1003 09:44:01.289033 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87fzp\" (UniqueName: \"kubernetes.io/projected/a9ebb961-19a4-4714-b6b9-d56cd919fb6a-kube-api-access-87fzp\") pod \"crc-debug-mn72q\" (UID: \"a9ebb961-19a4-4714-b6b9-d56cd919fb6a\") " pod="openshift-must-gather-b8z59/crc-debug-mn72q" Oct 03 09:44:01 crc kubenswrapper[4899]: I1003 09:44:01.289150 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a9ebb961-19a4-4714-b6b9-d56cd919fb6a-host\") pod \"crc-debug-mn72q\" (UID: \"a9ebb961-19a4-4714-b6b9-d56cd919fb6a\") " pod="openshift-must-gather-b8z59/crc-debug-mn72q" Oct 03 09:44:01 crc kubenswrapper[4899]: 
I1003 09:44:01.311098 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87fzp\" (UniqueName: \"kubernetes.io/projected/a9ebb961-19a4-4714-b6b9-d56cd919fb6a-kube-api-access-87fzp\") pod \"crc-debug-mn72q\" (UID: \"a9ebb961-19a4-4714-b6b9-d56cd919fb6a\") " pod="openshift-must-gather-b8z59/crc-debug-mn72q" Oct 03 09:44:01 crc kubenswrapper[4899]: I1003 09:44:01.404337 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-b8z59/crc-debug-mn72q" Oct 03 09:44:01 crc kubenswrapper[4899]: I1003 09:44:01.960117 4899 generic.go:334] "Generic (PLEG): container finished" podID="a9ebb961-19a4-4714-b6b9-d56cd919fb6a" containerID="1a062c5ceb85f2e557e5a4484dba586ab038232289cd151382574c551d7b457f" exitCode=0 Oct 03 09:44:01 crc kubenswrapper[4899]: I1003 09:44:01.960176 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-b8z59/crc-debug-mn72q" event={"ID":"a9ebb961-19a4-4714-b6b9-d56cd919fb6a","Type":"ContainerDied","Data":"1a062c5ceb85f2e557e5a4484dba586ab038232289cd151382574c551d7b457f"} Oct 03 09:44:01 crc kubenswrapper[4899]: I1003 09:44:01.960209 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-b8z59/crc-debug-mn72q" event={"ID":"a9ebb961-19a4-4714-b6b9-d56cd919fb6a","Type":"ContainerStarted","Data":"abfafac25ed5c5938ab817a1aaefd3156d39c7a4c0e75b94e11389af014a98d9"} Oct 03 09:44:01 crc kubenswrapper[4899]: I1003 09:44:01.999132 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-b8z59/crc-debug-mn72q"] Oct 03 09:44:02 crc kubenswrapper[4899]: I1003 09:44:02.007187 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-b8z59/crc-debug-mn72q"] Oct 03 09:44:03 crc kubenswrapper[4899]: I1003 09:44:03.069362 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-b8z59/crc-debug-mn72q" Oct 03 09:44:03 crc kubenswrapper[4899]: I1003 09:44:03.122813 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a9ebb961-19a4-4714-b6b9-d56cd919fb6a-host\") pod \"a9ebb961-19a4-4714-b6b9-d56cd919fb6a\" (UID: \"a9ebb961-19a4-4714-b6b9-d56cd919fb6a\") " Oct 03 09:44:03 crc kubenswrapper[4899]: I1003 09:44:03.123153 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a9ebb961-19a4-4714-b6b9-d56cd919fb6a-host" (OuterVolumeSpecName: "host") pod "a9ebb961-19a4-4714-b6b9-d56cd919fb6a" (UID: "a9ebb961-19a4-4714-b6b9-d56cd919fb6a"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 09:44:03 crc kubenswrapper[4899]: I1003 09:44:03.123312 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-87fzp\" (UniqueName: \"kubernetes.io/projected/a9ebb961-19a4-4714-b6b9-d56cd919fb6a-kube-api-access-87fzp\") pod \"a9ebb961-19a4-4714-b6b9-d56cd919fb6a\" (UID: \"a9ebb961-19a4-4714-b6b9-d56cd919fb6a\") " Oct 03 09:44:03 crc kubenswrapper[4899]: I1003 09:44:03.123811 4899 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a9ebb961-19a4-4714-b6b9-d56cd919fb6a-host\") on node \"crc\" DevicePath \"\"" Oct 03 09:44:03 crc kubenswrapper[4899]: I1003 09:44:03.145602 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9ebb961-19a4-4714-b6b9-d56cd919fb6a-kube-api-access-87fzp" (OuterVolumeSpecName: "kube-api-access-87fzp") pod "a9ebb961-19a4-4714-b6b9-d56cd919fb6a" (UID: "a9ebb961-19a4-4714-b6b9-d56cd919fb6a"). InnerVolumeSpecName "kube-api-access-87fzp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:44:03 crc kubenswrapper[4899]: I1003 09:44:03.225462 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-87fzp\" (UniqueName: \"kubernetes.io/projected/a9ebb961-19a4-4714-b6b9-d56cd919fb6a-kube-api-access-87fzp\") on node \"crc\" DevicePath \"\"" Oct 03 09:44:03 crc kubenswrapper[4899]: I1003 09:44:03.504955 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d_f5e06749-553d-438a-b1be-4db08df71d67/util/0.log" Oct 03 09:44:03 crc kubenswrapper[4899]: I1003 09:44:03.711638 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d_f5e06749-553d-438a-b1be-4db08df71d67/pull/0.log" Oct 03 09:44:03 crc kubenswrapper[4899]: I1003 09:44:03.720064 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d_f5e06749-553d-438a-b1be-4db08df71d67/pull/0.log" Oct 03 09:44:03 crc kubenswrapper[4899]: I1003 09:44:03.742170 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d_f5e06749-553d-438a-b1be-4db08df71d67/util/0.log" Oct 03 09:44:03 crc kubenswrapper[4899]: I1003 09:44:03.955486 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d_f5e06749-553d-438a-b1be-4db08df71d67/pull/0.log" Oct 03 09:44:03 crc kubenswrapper[4899]: I1003 09:44:03.959518 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d_f5e06749-553d-438a-b1be-4db08df71d67/util/0.log" Oct 03 09:44:03 crc kubenswrapper[4899]: I1003 09:44:03.988718 4899 scope.go:117] "RemoveContainer" containerID="1a062c5ceb85f2e557e5a4484dba586ab038232289cd151382574c551d7b457f" Oct 03 09:44:03 crc kubenswrapper[4899]: I1003 09:44:03.988885 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-b8z59/crc-debug-mn72q" Oct 03 09:44:04 crc kubenswrapper[4899]: I1003 09:44:04.012951 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1e2086809a5259d97534a9997ce021aa879cadf2377934fb8784324bfdmjs2d_f5e06749-553d-438a-b1be-4db08df71d67/extract/0.log" Oct 03 09:44:04 crc kubenswrapper[4899]: I1003 09:44:04.189386 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6c675fb79f-hz7qr_c90f297d-af70-423f-b34d-8b3599ba12eb/kube-rbac-proxy/0.log" Oct 03 09:44:04 crc kubenswrapper[4899]: I1003 09:44:04.240635 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6c675fb79f-hz7qr_c90f297d-af70-423f-b34d-8b3599ba12eb/manager/0.log" Oct 03 09:44:04 crc kubenswrapper[4899]: I1003 09:44:04.270560 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-79d68d6c85-bj2gj_3e876467-fd1a-4b4c-b62b-d1641400a756/kube-rbac-proxy/0.log" Oct 03 09:44:04 crc kubenswrapper[4899]: I1003 09:44:04.404468 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-79d68d6c85-bj2gj_3e876467-fd1a-4b4c-b62b-d1641400a756/manager/0.log" Oct 03 09:44:04 crc kubenswrapper[4899]: I1003 09:44:04.462064 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-75dfd9b554-jqgrz_ba75f5b9-b92b-4cd7-98c9-1bcf6b772940/kube-rbac-proxy/0.log" Oct 03 09:44:04 crc kubenswrapper[4899]: I1003 09:44:04.465860 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-75dfd9b554-jqgrz_ba75f5b9-b92b-4cd7-98c9-1bcf6b772940/manager/0.log" Oct 03 09:44:04 crc kubenswrapper[4899]: I1003 09:44:04.539238 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9ebb961-19a4-4714-b6b9-d56cd919fb6a" path="/var/lib/kubelet/pods/a9ebb961-19a4-4714-b6b9-d56cd919fb6a/volumes" Oct 03 09:44:04 crc kubenswrapper[4899]: I1003 09:44:04.612988 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-846dff85b5-mhkqv_b32f9b3e-72a8-4229-9715-8fdd98877a04/kube-rbac-proxy/0.log" Oct 03 09:44:04 crc kubenswrapper[4899]: I1003 09:44:04.743724 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-846dff85b5-mhkqv_b32f9b3e-72a8-4229-9715-8fdd98877a04/manager/0.log" Oct 03 09:44:04 crc kubenswrapper[4899]: I1003 09:44:04.798110 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-599898f689-5c9bs_b14b68d1-483a-419c-b696-a915c6d25d09/kube-rbac-proxy/0.log" Oct 03 09:44:04 crc kubenswrapper[4899]: I1003 09:44:04.817545 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-599898f689-5c9bs_b14b68d1-483a-419c-b696-a915c6d25d09/manager/0.log" Oct 03 09:44:04 crc kubenswrapper[4899]: I1003 09:44:04.924187 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6769b867d9-k8snv_a8895f13-915f-45f7-8156-43a7f11ac9bb/kube-rbac-proxy/0.log" Oct 03 09:44:05 crc kubenswrapper[4899]: I1003 09:44:05.023382 4899 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6769b867d9-k8snv_a8895f13-915f-45f7-8156-43a7f11ac9bb/manager/0.log" Oct 03 09:44:05 crc kubenswrapper[4899]: I1003 09:44:05.075546 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-5fbf469cd7-9qcj2_2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919/kube-rbac-proxy/0.log" Oct 03 09:44:05 crc kubenswrapper[4899]: I1003 09:44:05.227829 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-5fbf469cd7-9qcj2_2a9a8ff5-a009-4d5f-bc3a-8e449b5fd919/manager/0.log" Oct 03 09:44:05 crc kubenswrapper[4899]: I1003 09:44:05.233958 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-84bc9db6cc-hpqjs_39eb57f7-d61f-4445-aea3-6b96585c4f76/kube-rbac-proxy/0.log" Oct 03 09:44:05 crc kubenswrapper[4899]: I1003 09:44:05.306342 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-84bc9db6cc-hpqjs_39eb57f7-d61f-4445-aea3-6b96585c4f76/manager/0.log" Oct 03 09:44:05 crc kubenswrapper[4899]: I1003 09:44:05.417109 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7f55849f88-qxxgx_d8d28854-8e4e-47cd-847a-c58811fb4f91/kube-rbac-proxy/0.log" Oct 03 09:44:05 crc kubenswrapper[4899]: I1003 09:44:05.500712 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7f55849f88-qxxgx_d8d28854-8e4e-47cd-847a-c58811fb4f91/manager/0.log" Oct 03 09:44:05 crc kubenswrapper[4899]: I1003 09:44:05.609578 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6fd6854b49-hwhx2_ed44541c-bb31-43bb-92eb-298b01820505/kube-rbac-proxy/0.log" Oct 03 09:44:05 crc kubenswrapper[4899]: I1003 09:44:05.611359 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6fd6854b49-hwhx2_ed44541c-bb31-43bb-92eb-298b01820505/manager/0.log" Oct 03 09:44:05 crc kubenswrapper[4899]: I1003 09:44:05.717191 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5c468bf4d4-trrmd_110672ad-3117-4a7c-8614-f12ab626e28c/kube-rbac-proxy/0.log" Oct 03 09:44:05 crc kubenswrapper[4899]: I1003 09:44:05.799548 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5c468bf4d4-trrmd_110672ad-3117-4a7c-8614-f12ab626e28c/manager/0.log" Oct 03 09:44:05 crc kubenswrapper[4899]: I1003 09:44:05.871836 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-6574bf987d-dglwx_f500aadc-0447-4d26-9ab4-83f64b084a89/kube-rbac-proxy/0.log" Oct 03 09:44:05 crc kubenswrapper[4899]: I1003 09:44:05.957059 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-6574bf987d-dglwx_f500aadc-0447-4d26-9ab4-83f64b084a89/manager/0.log" Oct 03 09:44:06 crc kubenswrapper[4899]: I1003 09:44:06.031446 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-555c7456bd-78vk7_20ecf87a-f08f-4d2f-92fd-ba14a9a9e5b2/kube-rbac-proxy/0.log" Oct 03 09:44:06 crc kubenswrapper[4899]: I1003 09:44:06.135940 4899 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-555c7456bd-78vk7_20ecf87a-f08f-4d2f-92fd-ba14a9a9e5b2/manager/0.log" Oct 03 09:44:06 crc kubenswrapper[4899]: I1003 09:44:06.220961 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-59d6cfdf45-7zf22_de3ec379-fb48-440a-8502-3650db78804a/kube-rbac-proxy/0.log" Oct 03 09:44:06 crc kubenswrapper[4899]: I1003 09:44:06.273225 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-59d6cfdf45-7zf22_de3ec379-fb48-440a-8502-3650db78804a/manager/0.log" Oct 03 09:44:06 crc kubenswrapper[4899]: I1003 09:44:06.350502 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg_62eda81d-d797-4ed4-9687-9cdc7c49decb/kube-rbac-proxy/0.log" Oct 03 09:44:06 crc kubenswrapper[4899]: I1003 09:44:06.438268 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6f64c4d678w5ccg_62eda81d-d797-4ed4-9687-9cdc7c49decb/manager/0.log" Oct 03 09:44:06 crc kubenswrapper[4899]: I1003 09:44:06.514048 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7cfd4b6679-jn88c_2bb995f5-5432-40fc-a196-71cac18de666/kube-rbac-proxy/0.log" Oct 03 09:44:06 crc kubenswrapper[4899]: I1003 09:44:06.700938 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-669c8666b5-gt89m_99f6fa80-7e18-4fa5-8421-8ff2e51fbdbc/kube-rbac-proxy/0.log" Oct 03 09:44:06 crc kubenswrapper[4899]: I1003 09:44:06.945289 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-cq96k_3e217d24-f3ae-48f2-87bb-d9b735659f5d/registry-server/0.log" Oct 03 09:44:06 crc kubenswrapper[4899]: I1003 09:44:06.980836 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-669c8666b5-gt89m_99f6fa80-7e18-4fa5-8421-8ff2e51fbdbc/operator/0.log" Oct 03 09:44:07 crc kubenswrapper[4899]: I1003 09:44:07.180319 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-688db7b6c7-7psz6_2cbb69db-51ad-471c-be3a-57b9422f11cd/kube-rbac-proxy/0.log" Oct 03 09:44:07 crc kubenswrapper[4899]: I1003 09:44:07.262265 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-688db7b6c7-7psz6_2cbb69db-51ad-471c-be3a-57b9422f11cd/manager/0.log" Oct 03 09:44:07 crc kubenswrapper[4899]: I1003 09:44:07.454711 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-7d8bb7f44c-bnb75_6008780d-5be3-4fda-8526-594566364ae4/kube-rbac-proxy/0.log" Oct 03 09:44:07 crc kubenswrapper[4899]: I1003 09:44:07.508820 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-7d8bb7f44c-bnb75_6008780d-5be3-4fda-8526-594566364ae4/manager/0.log" Oct 03 09:44:07 crc kubenswrapper[4899]: I1003 09:44:07.701643 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-n8q4j_5c4913b3-fd20-4eee-99df-1900f5486f51/operator/0.log" Oct 03 09:44:07 crc kubenswrapper[4899]: I1003 
09:44:07.758732 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7cfd4b6679-jn88c_2bb995f5-5432-40fc-a196-71cac18de666/manager/0.log" Oct 03 09:44:07 crc kubenswrapper[4899]: I1003 09:44:07.764511 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6859f9b676-f5mgb_837b07a9-5832-4d01-b257-ac3fca82b121/kube-rbac-proxy/0.log" Oct 03 09:44:07 crc kubenswrapper[4899]: I1003 09:44:07.844169 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6859f9b676-f5mgb_837b07a9-5832-4d01-b257-ac3fca82b121/manager/0.log" Oct 03 09:44:07 crc kubenswrapper[4899]: I1003 09:44:07.910854 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5db5cf686f-zvwbw_6c6cb9a6-eacf-411e-8c19-ac8ee51eced8/kube-rbac-proxy/0.log" Oct 03 09:44:08 crc kubenswrapper[4899]: I1003 09:44:08.000270 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5db5cf686f-zvwbw_6c6cb9a6-eacf-411e-8c19-ac8ee51eced8/manager/0.log" Oct 03 09:44:08 crc kubenswrapper[4899]: I1003 09:44:08.045567 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cd5cb47d7-lpwr4_03abda9b-2057-42c8-8161-4104ecb96027/kube-rbac-proxy/0.log" Oct 03 09:44:08 crc kubenswrapper[4899]: I1003 09:44:08.100634 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cd5cb47d7-lpwr4_03abda9b-2057-42c8-8161-4104ecb96027/manager/0.log" Oct 03 09:44:08 crc kubenswrapper[4899]: I1003 09:44:08.193104 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-fcd7d9895-lxmmw_7496bf16-1fc1-44ec-b96b-e75e00652634/kube-rbac-proxy/0.log" Oct 03 09:44:08 crc kubenswrapper[4899]: I1003 09:44:08.196587 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-fcd7d9895-lxmmw_7496bf16-1fc1-44ec-b96b-e75e00652634/manager/0.log" Oct 03 09:44:09 crc kubenswrapper[4899]: I1003 09:44:09.526665 4899 scope.go:117] "RemoveContainer" containerID="b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba" Oct 03 09:44:09 crc kubenswrapper[4899]: E1003 09:44:09.527270 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:44:21 crc kubenswrapper[4899]: I1003 09:44:21.800749 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-f8jvt_ae242b31-ad12-4328-8818-313458ed46aa/control-plane-machine-set-operator/0.log" Oct 03 09:44:21 crc kubenswrapper[4899]: I1003 09:44:21.970268 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-lw5xr_a8b9468d-675b-42d9-b5e8-b45f5d35deef/kube-rbac-proxy/0.log" Oct 03 09:44:22 crc kubenswrapper[4899]: I1003 09:44:22.008970 4899 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-lw5xr_a8b9468d-675b-42d9-b5e8-b45f5d35deef/machine-api-operator/0.log" Oct 03 09:44:24 crc kubenswrapper[4899]: I1003 09:44:24.528468 4899 scope.go:117] "RemoveContainer" containerID="b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba" Oct 03 09:44:24 crc kubenswrapper[4899]: E1003 09:44:24.529189 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:44:32 crc kubenswrapper[4899]: I1003 09:44:32.804456 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-r2625_9892f98e-dee3-42ea-88c5-2a17dc19988d/cert-manager-controller/0.log" Oct 03 09:44:32 crc kubenswrapper[4899]: I1003 09:44:32.927452 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-prrpr_00a2b8ef-3f94-4a34-8692-6fd9fa800cd9/cert-manager-cainjector/0.log" Oct 03 09:44:32 crc kubenswrapper[4899]: I1003 09:44:32.979264 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-jjw6p_355746c7-f59d-41d1-9cbe-c3668e16d478/cert-manager-webhook/0.log" Oct 03 09:44:39 crc kubenswrapper[4899]: I1003 09:44:39.528340 4899 scope.go:117] "RemoveContainer" containerID="b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba" Oct 03 09:44:39 crc kubenswrapper[4899]: E1003 09:44:39.529220 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:44:43 crc kubenswrapper[4899]: I1003 09:44:43.739012 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-9lk8l_932306e1-0688-47a3-af53-642db1b63eb0/nmstate-console-plugin/0.log" Oct 03 09:44:43 crc kubenswrapper[4899]: I1003 09:44:43.933786 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-jqv6w_e97cb3b7-2cf2-4021-b189-0e4c79b60f9a/kube-rbac-proxy/0.log" Oct 03 09:44:43 crc kubenswrapper[4899]: I1003 09:44:43.943349 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-wggc2_3bcb7a6a-9902-4fea-a7d2-c7a508d7f695/nmstate-handler/0.log" Oct 03 09:44:43 crc kubenswrapper[4899]: I1003 09:44:43.969878 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-jqv6w_e97cb3b7-2cf2-4021-b189-0e4c79b60f9a/nmstate-metrics/0.log" Oct 03 09:44:44 crc kubenswrapper[4899]: I1003 09:44:44.124823 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-kf8v5_65081539-f48e-404c-96a9-c1f8035404ed/nmstate-operator/0.log" Oct 03 09:44:44 crc kubenswrapper[4899]: I1003 09:44:44.190098 4899 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-65rlw_cfd8a398-f0f8-47ed-9f92-49edea78e66b/nmstate-webhook/0.log" Oct 03 09:44:50 crc kubenswrapper[4899]: I1003 09:44:50.861638 4899 scope.go:117] "RemoveContainer" containerID="bea858ebef3e3c09bb81c722f846b5c18022102a2e4273f5976661e00726108c" Oct 03 09:44:54 crc kubenswrapper[4899]: I1003 09:44:54.527452 4899 scope.go:117] "RemoveContainer" containerID="b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba" Oct 03 09:44:54 crc kubenswrapper[4899]: E1003 09:44:54.528495 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:44:56 crc kubenswrapper[4899]: I1003 09:44:56.894975 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-mgk7c_8ec8f47a-3de7-4e04-a612-dbf72a0a21d5/kube-rbac-proxy/0.log" Oct 03 09:44:57 crc kubenswrapper[4899]: I1003 09:44:57.075124 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-mgk7c_8ec8f47a-3de7-4e04-a612-dbf72a0a21d5/controller/0.log" Oct 03 09:44:57 crc kubenswrapper[4899]: I1003 09:44:57.172549 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/cp-frr-files/0.log" Oct 03 09:44:57 crc kubenswrapper[4899]: I1003 09:44:57.359220 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/cp-reloader/0.log" Oct 03 09:44:57 crc kubenswrapper[4899]: I1003 09:44:57.401784 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/cp-metrics/0.log" Oct 03 09:44:57 crc kubenswrapper[4899]: I1003 09:44:57.414568 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/cp-reloader/0.log" Oct 03 09:44:57 crc kubenswrapper[4899]: I1003 09:44:57.438249 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/cp-frr-files/0.log" Oct 03 09:44:57 crc kubenswrapper[4899]: I1003 09:44:57.654617 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/cp-frr-files/0.log" Oct 03 09:44:57 crc kubenswrapper[4899]: I1003 09:44:57.661021 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/cp-reloader/0.log" Oct 03 09:44:57 crc kubenswrapper[4899]: I1003 09:44:57.668016 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/cp-metrics/0.log" Oct 03 09:44:57 crc kubenswrapper[4899]: I1003 09:44:57.692401 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/cp-metrics/0.log" Oct 03 09:44:57 crc kubenswrapper[4899]: I1003 09:44:57.868295 4899 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/cp-reloader/0.log" Oct 03 09:44:57 crc kubenswrapper[4899]: I1003 09:44:57.880320 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/controller/0.log" Oct 03 09:44:57 crc kubenswrapper[4899]: I1003 09:44:57.884847 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/cp-metrics/0.log" Oct 03 09:44:57 crc kubenswrapper[4899]: I1003 09:44:57.906010 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/cp-frr-files/0.log" Oct 03 09:44:58 crc kubenswrapper[4899]: I1003 09:44:58.074665 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/frr-metrics/0.log" Oct 03 09:44:58 crc kubenswrapper[4899]: I1003 09:44:58.128071 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/kube-rbac-proxy/0.log" Oct 03 09:44:58 crc kubenswrapper[4899]: I1003 09:44:58.144522 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/kube-rbac-proxy-frr/0.log" Oct 03 09:44:58 crc kubenswrapper[4899]: I1003 09:44:58.288695 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/reloader/0.log" Oct 03 09:44:58 crc kubenswrapper[4899]: I1003 09:44:58.352972 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-64bf5d555-6t985_d43f846f-0e5f-4bb8-9041-6b471ca7e6df/frr-k8s-webhook-server/0.log" Oct 03 09:44:58 crc kubenswrapper[4899]: I1003 09:44:58.613183 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-7dd48c8965-pvfqh_8643a4de-b352-4699-8054-7d4e4f97a946/manager/0.log" Oct 03 09:44:58 crc kubenswrapper[4899]: I1003 09:44:58.815159 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-798f6d5f9c-6p988_cd92ea76-f379-4b3d-aac6-2143d789e086/webhook-server/0.log" Oct 03 09:44:58 crc kubenswrapper[4899]: I1003 09:44:58.925917 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-9jkt6_94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6/kube-rbac-proxy/0.log" Oct 03 09:44:59 crc kubenswrapper[4899]: I1003 09:44:59.659471 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-9jkt6_94c1ed1d-c44a-4e19-a1ed-3583cd9f29f6/speaker/0.log" Oct 03 09:44:59 crc kubenswrapper[4899]: I1003 09:44:59.717690 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dq8r4_b1b20a0b-2a5d-472e-8ccd-e4e89d466eed/frr/0.log" Oct 03 09:45:00 crc kubenswrapper[4899]: I1003 09:45:00.146134 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324745-g64gq"] Oct 03 09:45:00 crc kubenswrapper[4899]: E1003 09:45:00.146667 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9ebb961-19a4-4714-b6b9-d56cd919fb6a" containerName="container-00" Oct 03 09:45:00 crc kubenswrapper[4899]: I1003 09:45:00.146687 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9ebb961-19a4-4714-b6b9-d56cd919fb6a" containerName="container-00" 
Oct 03 09:45:00 crc kubenswrapper[4899]: I1003 09:45:00.146869 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9ebb961-19a4-4714-b6b9-d56cd919fb6a" containerName="container-00" Oct 03 09:45:00 crc kubenswrapper[4899]: I1003 09:45:00.147584 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324745-g64gq" Oct 03 09:45:00 crc kubenswrapper[4899]: I1003 09:45:00.150086 4899 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 03 09:45:00 crc kubenswrapper[4899]: I1003 09:45:00.150394 4899 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 03 09:45:00 crc kubenswrapper[4899]: I1003 09:45:00.157831 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324745-g64gq"] Oct 03 09:45:00 crc kubenswrapper[4899]: I1003 09:45:00.226327 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a46aeba5-12a6-4eff-9032-77e31126cd7c-secret-volume\") pod \"collect-profiles-29324745-g64gq\" (UID: \"a46aeba5-12a6-4eff-9032-77e31126cd7c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324745-g64gq" Oct 03 09:45:00 crc kubenswrapper[4899]: I1003 09:45:00.226401 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a46aeba5-12a6-4eff-9032-77e31126cd7c-config-volume\") pod \"collect-profiles-29324745-g64gq\" (UID: \"a46aeba5-12a6-4eff-9032-77e31126cd7c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324745-g64gq" Oct 03 09:45:00 crc kubenswrapper[4899]: I1003 09:45:00.226459 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8x4h2\" (UniqueName: \"kubernetes.io/projected/a46aeba5-12a6-4eff-9032-77e31126cd7c-kube-api-access-8x4h2\") pod \"collect-profiles-29324745-g64gq\" (UID: \"a46aeba5-12a6-4eff-9032-77e31126cd7c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324745-g64gq" Oct 03 09:45:00 crc kubenswrapper[4899]: I1003 09:45:00.329244 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8x4h2\" (UniqueName: \"kubernetes.io/projected/a46aeba5-12a6-4eff-9032-77e31126cd7c-kube-api-access-8x4h2\") pod \"collect-profiles-29324745-g64gq\" (UID: \"a46aeba5-12a6-4eff-9032-77e31126cd7c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324745-g64gq" Oct 03 09:45:00 crc kubenswrapper[4899]: I1003 09:45:00.329493 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a46aeba5-12a6-4eff-9032-77e31126cd7c-secret-volume\") pod \"collect-profiles-29324745-g64gq\" (UID: \"a46aeba5-12a6-4eff-9032-77e31126cd7c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324745-g64gq" Oct 03 09:45:00 crc kubenswrapper[4899]: I1003 09:45:00.329543 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a46aeba5-12a6-4eff-9032-77e31126cd7c-config-volume\") pod \"collect-profiles-29324745-g64gq\" (UID: \"a46aeba5-12a6-4eff-9032-77e31126cd7c\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29324745-g64gq" Oct 03 09:45:00 crc kubenswrapper[4899]: I1003 09:45:00.330676 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a46aeba5-12a6-4eff-9032-77e31126cd7c-config-volume\") pod \"collect-profiles-29324745-g64gq\" (UID: \"a46aeba5-12a6-4eff-9032-77e31126cd7c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324745-g64gq" Oct 03 09:45:00 crc kubenswrapper[4899]: I1003 09:45:00.342298 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a46aeba5-12a6-4eff-9032-77e31126cd7c-secret-volume\") pod \"collect-profiles-29324745-g64gq\" (UID: \"a46aeba5-12a6-4eff-9032-77e31126cd7c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324745-g64gq" Oct 03 09:45:00 crc kubenswrapper[4899]: I1003 09:45:00.347617 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8x4h2\" (UniqueName: \"kubernetes.io/projected/a46aeba5-12a6-4eff-9032-77e31126cd7c-kube-api-access-8x4h2\") pod \"collect-profiles-29324745-g64gq\" (UID: \"a46aeba5-12a6-4eff-9032-77e31126cd7c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324745-g64gq" Oct 03 09:45:00 crc kubenswrapper[4899]: I1003 09:45:00.491718 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324745-g64gq" Oct 03 09:45:00 crc kubenswrapper[4899]: I1003 09:45:00.974650 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324745-g64gq"] Oct 03 09:45:01 crc kubenswrapper[4899]: I1003 09:45:01.473065 4899 generic.go:334] "Generic (PLEG): container finished" podID="a46aeba5-12a6-4eff-9032-77e31126cd7c" containerID="6729c23d87e93892601a37afd2199c0d6cf6f9f4c045e2046aa862eee850ab0c" exitCode=0 Oct 03 09:45:01 crc kubenswrapper[4899]: I1003 09:45:01.473203 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324745-g64gq" event={"ID":"a46aeba5-12a6-4eff-9032-77e31126cd7c","Type":"ContainerDied","Data":"6729c23d87e93892601a37afd2199c0d6cf6f9f4c045e2046aa862eee850ab0c"} Oct 03 09:45:01 crc kubenswrapper[4899]: I1003 09:45:01.473386 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324745-g64gq" event={"ID":"a46aeba5-12a6-4eff-9032-77e31126cd7c","Type":"ContainerStarted","Data":"024999bd816ac9945b88e0e0cbb591562fb8a5852fe59e52f21c8830fbfa6e5a"} Oct 03 09:45:02 crc kubenswrapper[4899]: I1003 09:45:02.868176 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324745-g64gq" Oct 03 09:45:02 crc kubenswrapper[4899]: I1003 09:45:02.981594 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8x4h2\" (UniqueName: \"kubernetes.io/projected/a46aeba5-12a6-4eff-9032-77e31126cd7c-kube-api-access-8x4h2\") pod \"a46aeba5-12a6-4eff-9032-77e31126cd7c\" (UID: \"a46aeba5-12a6-4eff-9032-77e31126cd7c\") " Oct 03 09:45:02 crc kubenswrapper[4899]: I1003 09:45:02.982408 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a46aeba5-12a6-4eff-9032-77e31126cd7c-secret-volume\") pod \"a46aeba5-12a6-4eff-9032-77e31126cd7c\" (UID: \"a46aeba5-12a6-4eff-9032-77e31126cd7c\") " Oct 03 09:45:02 crc kubenswrapper[4899]: I1003 09:45:02.982499 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a46aeba5-12a6-4eff-9032-77e31126cd7c-config-volume\") pod \"a46aeba5-12a6-4eff-9032-77e31126cd7c\" (UID: \"a46aeba5-12a6-4eff-9032-77e31126cd7c\") " Oct 03 09:45:02 crc kubenswrapper[4899]: I1003 09:45:02.983343 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a46aeba5-12a6-4eff-9032-77e31126cd7c-config-volume" (OuterVolumeSpecName: "config-volume") pod "a46aeba5-12a6-4eff-9032-77e31126cd7c" (UID: "a46aeba5-12a6-4eff-9032-77e31126cd7c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 09:45:02 crc kubenswrapper[4899]: I1003 09:45:02.991087 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a46aeba5-12a6-4eff-9032-77e31126cd7c-kube-api-access-8x4h2" (OuterVolumeSpecName: "kube-api-access-8x4h2") pod "a46aeba5-12a6-4eff-9032-77e31126cd7c" (UID: "a46aeba5-12a6-4eff-9032-77e31126cd7c"). InnerVolumeSpecName "kube-api-access-8x4h2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:45:03 crc kubenswrapper[4899]: I1003 09:45:03.001177 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a46aeba5-12a6-4eff-9032-77e31126cd7c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "a46aeba5-12a6-4eff-9032-77e31126cd7c" (UID: "a46aeba5-12a6-4eff-9032-77e31126cd7c"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 09:45:03 crc kubenswrapper[4899]: I1003 09:45:03.085279 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8x4h2\" (UniqueName: \"kubernetes.io/projected/a46aeba5-12a6-4eff-9032-77e31126cd7c-kube-api-access-8x4h2\") on node \"crc\" DevicePath \"\"" Oct 03 09:45:03 crc kubenswrapper[4899]: I1003 09:45:03.085321 4899 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a46aeba5-12a6-4eff-9032-77e31126cd7c-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 03 09:45:03 crc kubenswrapper[4899]: I1003 09:45:03.085333 4899 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a46aeba5-12a6-4eff-9032-77e31126cd7c-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 09:45:03 crc kubenswrapper[4899]: I1003 09:45:03.491426 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324745-g64gq" event={"ID":"a46aeba5-12a6-4eff-9032-77e31126cd7c","Type":"ContainerDied","Data":"024999bd816ac9945b88e0e0cbb591562fb8a5852fe59e52f21c8830fbfa6e5a"} Oct 03 09:45:03 crc kubenswrapper[4899]: I1003 09:45:03.491457 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324745-g64gq" Oct 03 09:45:03 crc kubenswrapper[4899]: I1003 09:45:03.491465 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="024999bd816ac9945b88e0e0cbb591562fb8a5852fe59e52f21c8830fbfa6e5a" Oct 03 09:45:03 crc kubenswrapper[4899]: I1003 09:45:03.945741 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324700-r2dw4"] Oct 03 09:45:03 crc kubenswrapper[4899]: I1003 09:45:03.954323 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324700-r2dw4"] Oct 03 09:45:04 crc kubenswrapper[4899]: I1003 09:45:04.538870 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="559c1b4c-3181-42b6-9c75-d6fafcb769ab" path="/var/lib/kubelet/pods/559c1b4c-3181-42b6-9c75-d6fafcb769ab/volumes" Oct 03 09:45:06 crc kubenswrapper[4899]: I1003 09:45:06.547994 4899 scope.go:117] "RemoveContainer" containerID="b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba" Oct 03 09:45:06 crc kubenswrapper[4899]: E1003 09:45:06.548706 4899 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-t2h4g_openshift-machine-config-operator(3e8a7198-81da-475c-ac88-a460ba4064d1)\"" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" Oct 03 09:45:11 crc kubenswrapper[4899]: I1003 09:45:11.554875 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb_ab7bdf03-1685-4b51-b1a5-db0c9c4aa575/util/0.log" Oct 03 09:45:11 crc kubenswrapper[4899]: I1003 09:45:11.675349 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb_ab7bdf03-1685-4b51-b1a5-db0c9c4aa575/util/0.log" Oct 03 09:45:11 crc kubenswrapper[4899]: I1003 09:45:11.721576 4899 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb_ab7bdf03-1685-4b51-b1a5-db0c9c4aa575/pull/0.log" Oct 03 09:45:11 crc kubenswrapper[4899]: I1003 09:45:11.772967 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb_ab7bdf03-1685-4b51-b1a5-db0c9c4aa575/pull/0.log" Oct 03 09:45:11 crc kubenswrapper[4899]: I1003 09:45:11.989199 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb_ab7bdf03-1685-4b51-b1a5-db0c9c4aa575/extract/0.log" Oct 03 09:45:12 crc kubenswrapper[4899]: I1003 09:45:12.009248 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb_ab7bdf03-1685-4b51-b1a5-db0c9c4aa575/util/0.log" Oct 03 09:45:12 crc kubenswrapper[4899]: I1003 09:45:12.022475 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2w9lnb_ab7bdf03-1685-4b51-b1a5-db0c9c4aa575/pull/0.log" Oct 03 09:45:12 crc kubenswrapper[4899]: I1003 09:45:12.172509 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2xq2p_3e316fba-9f8d-4fb6-9adf-0c8842bdf476/extract-utilities/0.log" Oct 03 09:45:12 crc kubenswrapper[4899]: I1003 09:45:12.328372 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2xq2p_3e316fba-9f8d-4fb6-9adf-0c8842bdf476/extract-content/0.log" Oct 03 09:45:12 crc kubenswrapper[4899]: I1003 09:45:12.333428 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2xq2p_3e316fba-9f8d-4fb6-9adf-0c8842bdf476/extract-content/0.log" Oct 03 09:45:12 crc kubenswrapper[4899]: I1003 09:45:12.335078 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2xq2p_3e316fba-9f8d-4fb6-9adf-0c8842bdf476/extract-utilities/0.log" Oct 03 09:45:12 crc kubenswrapper[4899]: I1003 09:45:12.594126 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2xq2p_3e316fba-9f8d-4fb6-9adf-0c8842bdf476/extract-content/0.log" Oct 03 09:45:12 crc kubenswrapper[4899]: I1003 09:45:12.594537 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2xq2p_3e316fba-9f8d-4fb6-9adf-0c8842bdf476/extract-utilities/0.log" Oct 03 09:45:12 crc kubenswrapper[4899]: I1003 09:45:12.802216 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xw5wc_7acc767f-da7a-4e39-90e2-f504c5b40827/extract-utilities/0.log" Oct 03 09:45:13 crc kubenswrapper[4899]: I1003 09:45:13.007249 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xw5wc_7acc767f-da7a-4e39-90e2-f504c5b40827/extract-content/0.log" Oct 03 09:45:13 crc kubenswrapper[4899]: I1003 09:45:13.083804 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xw5wc_7acc767f-da7a-4e39-90e2-f504c5b40827/extract-utilities/0.log" Oct 03 09:45:13 crc kubenswrapper[4899]: I1003 09:45:13.124157 4899 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-xw5wc_7acc767f-da7a-4e39-90e2-f504c5b40827/extract-content/0.log" Oct 03 09:45:13 crc kubenswrapper[4899]: I1003 09:45:13.124731 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2xq2p_3e316fba-9f8d-4fb6-9adf-0c8842bdf476/registry-server/0.log" Oct 03 09:45:13 crc kubenswrapper[4899]: I1003 09:45:13.310718 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xw5wc_7acc767f-da7a-4e39-90e2-f504c5b40827/extract-content/0.log" Oct 03 09:45:13 crc kubenswrapper[4899]: I1003 09:45:13.318702 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xw5wc_7acc767f-da7a-4e39-90e2-f504c5b40827/extract-utilities/0.log" Oct 03 09:45:13 crc kubenswrapper[4899]: I1003 09:45:13.602249 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45_c4f4d920-9e4c-4828-89dd-4e95975d5ec8/util/0.log" Oct 03 09:45:13 crc kubenswrapper[4899]: I1003 09:45:13.712196 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xw5wc_7acc767f-da7a-4e39-90e2-f504c5b40827/registry-server/0.log" Oct 03 09:45:13 crc kubenswrapper[4899]: I1003 09:45:13.858728 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45_c4f4d920-9e4c-4828-89dd-4e95975d5ec8/util/0.log" Oct 03 09:45:13 crc kubenswrapper[4899]: I1003 09:45:13.887626 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45_c4f4d920-9e4c-4828-89dd-4e95975d5ec8/pull/0.log" Oct 03 09:45:13 crc kubenswrapper[4899]: I1003 09:45:13.892153 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45_c4f4d920-9e4c-4828-89dd-4e95975d5ec8/pull/0.log" Oct 03 09:45:14 crc kubenswrapper[4899]: I1003 09:45:14.038514 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45_c4f4d920-9e4c-4828-89dd-4e95975d5ec8/pull/0.log" Oct 03 09:45:14 crc kubenswrapper[4899]: I1003 09:45:14.053755 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45_c4f4d920-9e4c-4828-89dd-4e95975d5ec8/util/0.log" Oct 03 09:45:14 crc kubenswrapper[4899]: I1003 09:45:14.097003 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnlt45_c4f4d920-9e4c-4828-89dd-4e95975d5ec8/extract/0.log" Oct 03 09:45:14 crc kubenswrapper[4899]: I1003 09:45:14.213538 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-stjfx_2a4e1059-563b-443d-a7ce-d8af764e8900/marketplace-operator/0.log" Oct 03 09:45:14 crc kubenswrapper[4899]: I1003 09:45:14.309450 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-8bmxl_16e11948-6a93-444d-b9f9-c8f60100475c/extract-utilities/0.log" Oct 03 09:45:14 crc kubenswrapper[4899]: I1003 09:45:14.452266 4899 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-8bmxl_16e11948-6a93-444d-b9f9-c8f60100475c/extract-content/0.log" Oct 03 09:45:14 crc kubenswrapper[4899]: I1003 09:45:14.496930 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-8bmxl_16e11948-6a93-444d-b9f9-c8f60100475c/extract-content/0.log" Oct 03 09:45:14 crc kubenswrapper[4899]: I1003 09:45:14.499682 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-8bmxl_16e11948-6a93-444d-b9f9-c8f60100475c/extract-utilities/0.log" Oct 03 09:45:14 crc kubenswrapper[4899]: I1003 09:45:14.713574 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-8bmxl_16e11948-6a93-444d-b9f9-c8f60100475c/extract-content/0.log" Oct 03 09:45:14 crc kubenswrapper[4899]: I1003 09:45:14.742128 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-8bmxl_16e11948-6a93-444d-b9f9-c8f60100475c/extract-utilities/0.log" Oct 03 09:45:14 crc kubenswrapper[4899]: I1003 09:45:14.891015 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-8bmxl_16e11948-6a93-444d-b9f9-c8f60100475c/registry-server/0.log" Oct 03 09:45:14 crc kubenswrapper[4899]: I1003 09:45:14.941408 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hz84h_a8d22cd4-b04a-4b88-acd8-0949b94edd47/extract-utilities/0.log" Oct 03 09:45:15 crc kubenswrapper[4899]: I1003 09:45:15.129589 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hz84h_a8d22cd4-b04a-4b88-acd8-0949b94edd47/extract-utilities/0.log" Oct 03 09:45:15 crc kubenswrapper[4899]: I1003 09:45:15.160723 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hz84h_a8d22cd4-b04a-4b88-acd8-0949b94edd47/extract-content/0.log" Oct 03 09:45:15 crc kubenswrapper[4899]: I1003 09:45:15.170550 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hz84h_a8d22cd4-b04a-4b88-acd8-0949b94edd47/extract-content/0.log" Oct 03 09:45:15 crc kubenswrapper[4899]: I1003 09:45:15.317454 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hz84h_a8d22cd4-b04a-4b88-acd8-0949b94edd47/extract-content/0.log" Oct 03 09:45:15 crc kubenswrapper[4899]: I1003 09:45:15.361970 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hz84h_a8d22cd4-b04a-4b88-acd8-0949b94edd47/extract-utilities/0.log" Oct 03 09:45:15 crc kubenswrapper[4899]: I1003 09:45:15.837485 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hz84h_a8d22cd4-b04a-4b88-acd8-0949b94edd47/registry-server/0.log" Oct 03 09:45:21 crc kubenswrapper[4899]: I1003 09:45:21.527016 4899 scope.go:117] "RemoveContainer" containerID="b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba" Oct 03 09:45:22 crc kubenswrapper[4899]: I1003 09:45:22.681373 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerStarted","Data":"42ec69e38d0aefeb209614dbfbe306a4334353cab436262321077968632fe1de"} Oct 03 09:45:50 crc kubenswrapper[4899]: E1003 09:45:50.310324 4899 upgradeaware.go:427] Error proxying 
data from client to backend: readfrom tcp 38.129.56.217:59006->38.129.56.217:44793: write tcp 38.129.56.217:59006->38.129.56.217:44793: write: broken pipe Oct 03 09:45:50 crc kubenswrapper[4899]: I1003 09:45:50.938214 4899 scope.go:117] "RemoveContainer" containerID="d41168deaddca0fd748dfeb7ade6d473ace59560bd489f04a33fe6d000006bfb" Oct 03 09:47:04 crc kubenswrapper[4899]: I1003 09:47:04.592684 4899 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6pt75"] Oct 03 09:47:04 crc kubenswrapper[4899]: E1003 09:47:04.593729 4899 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a46aeba5-12a6-4eff-9032-77e31126cd7c" containerName="collect-profiles" Oct 03 09:47:04 crc kubenswrapper[4899]: I1003 09:47:04.593745 4899 state_mem.go:107] "Deleted CPUSet assignment" podUID="a46aeba5-12a6-4eff-9032-77e31126cd7c" containerName="collect-profiles" Oct 03 09:47:04 crc kubenswrapper[4899]: I1003 09:47:04.594259 4899 memory_manager.go:354] "RemoveStaleState removing state" podUID="a46aeba5-12a6-4eff-9032-77e31126cd7c" containerName="collect-profiles" Oct 03 09:47:04 crc kubenswrapper[4899]: I1003 09:47:04.595775 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6pt75" Oct 03 09:47:04 crc kubenswrapper[4899]: I1003 09:47:04.610043 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6pt75"] Oct 03 09:47:04 crc kubenswrapper[4899]: I1003 09:47:04.706169 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npgdr\" (UniqueName: \"kubernetes.io/projected/7b287c0a-bb82-4ec7-bf9d-782452107a6d-kube-api-access-npgdr\") pod \"redhat-operators-6pt75\" (UID: \"7b287c0a-bb82-4ec7-bf9d-782452107a6d\") " pod="openshift-marketplace/redhat-operators-6pt75" Oct 03 09:47:04 crc kubenswrapper[4899]: I1003 09:47:04.706253 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b287c0a-bb82-4ec7-bf9d-782452107a6d-catalog-content\") pod \"redhat-operators-6pt75\" (UID: \"7b287c0a-bb82-4ec7-bf9d-782452107a6d\") " pod="openshift-marketplace/redhat-operators-6pt75" Oct 03 09:47:04 crc kubenswrapper[4899]: I1003 09:47:04.706385 4899 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b287c0a-bb82-4ec7-bf9d-782452107a6d-utilities\") pod \"redhat-operators-6pt75\" (UID: \"7b287c0a-bb82-4ec7-bf9d-782452107a6d\") " pod="openshift-marketplace/redhat-operators-6pt75" Oct 03 09:47:04 crc kubenswrapper[4899]: I1003 09:47:04.809041 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b287c0a-bb82-4ec7-bf9d-782452107a6d-utilities\") pod \"redhat-operators-6pt75\" (UID: \"7b287c0a-bb82-4ec7-bf9d-782452107a6d\") " pod="openshift-marketplace/redhat-operators-6pt75" Oct 03 09:47:04 crc kubenswrapper[4899]: I1003 09:47:04.809438 4899 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npgdr\" (UniqueName: \"kubernetes.io/projected/7b287c0a-bb82-4ec7-bf9d-782452107a6d-kube-api-access-npgdr\") pod \"redhat-operators-6pt75\" (UID: \"7b287c0a-bb82-4ec7-bf9d-782452107a6d\") " pod="openshift-marketplace/redhat-operators-6pt75" Oct 03 09:47:04 crc kubenswrapper[4899]: I1003 09:47:04.809604 4899 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b287c0a-bb82-4ec7-bf9d-782452107a6d-catalog-content\") pod \"redhat-operators-6pt75\" (UID: \"7b287c0a-bb82-4ec7-bf9d-782452107a6d\") " pod="openshift-marketplace/redhat-operators-6pt75" Oct 03 09:47:04 crc kubenswrapper[4899]: I1003 09:47:04.809967 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b287c0a-bb82-4ec7-bf9d-782452107a6d-utilities\") pod \"redhat-operators-6pt75\" (UID: \"7b287c0a-bb82-4ec7-bf9d-782452107a6d\") " pod="openshift-marketplace/redhat-operators-6pt75" Oct 03 09:47:04 crc kubenswrapper[4899]: I1003 09:47:04.810141 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b287c0a-bb82-4ec7-bf9d-782452107a6d-catalog-content\") pod \"redhat-operators-6pt75\" (UID: \"7b287c0a-bb82-4ec7-bf9d-782452107a6d\") " pod="openshift-marketplace/redhat-operators-6pt75" Oct 03 09:47:04 crc kubenswrapper[4899]: I1003 09:47:04.839325 4899 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npgdr\" (UniqueName: \"kubernetes.io/projected/7b287c0a-bb82-4ec7-bf9d-782452107a6d-kube-api-access-npgdr\") pod \"redhat-operators-6pt75\" (UID: \"7b287c0a-bb82-4ec7-bf9d-782452107a6d\") " pod="openshift-marketplace/redhat-operators-6pt75" Oct 03 09:47:04 crc kubenswrapper[4899]: I1003 09:47:04.924768 4899 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6pt75" Oct 03 09:47:05 crc kubenswrapper[4899]: I1003 09:47:05.436412 4899 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6pt75"] Oct 03 09:47:05 crc kubenswrapper[4899]: I1003 09:47:05.608023 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6pt75" event={"ID":"7b287c0a-bb82-4ec7-bf9d-782452107a6d","Type":"ContainerStarted","Data":"2f8c784af972dd55ba568fb84adffd81ae130a5e3ee52d7b7bd6492a69e7d214"} Oct 03 09:47:06 crc kubenswrapper[4899]: I1003 09:47:06.617013 4899 generic.go:334] "Generic (PLEG): container finished" podID="7b287c0a-bb82-4ec7-bf9d-782452107a6d" containerID="818269879f0bbc8e26887eb4a538d2788d60d383de523eae3c1e4aa6aaffc165" exitCode=0 Oct 03 09:47:06 crc kubenswrapper[4899]: I1003 09:47:06.617262 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6pt75" event={"ID":"7b287c0a-bb82-4ec7-bf9d-782452107a6d","Type":"ContainerDied","Data":"818269879f0bbc8e26887eb4a538d2788d60d383de523eae3c1e4aa6aaffc165"} Oct 03 09:47:08 crc kubenswrapper[4899]: I1003 09:47:08.639222 4899 generic.go:334] "Generic (PLEG): container finished" podID="7b287c0a-bb82-4ec7-bf9d-782452107a6d" containerID="ba0eca80081be97abd60d53503b734349a739078f48c4da756a52674b2e475f5" exitCode=0 Oct 03 09:47:08 crc kubenswrapper[4899]: I1003 09:47:08.639307 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6pt75" event={"ID":"7b287c0a-bb82-4ec7-bf9d-782452107a6d","Type":"ContainerDied","Data":"ba0eca80081be97abd60d53503b734349a739078f48c4da756a52674b2e475f5"} Oct 03 09:47:10 crc kubenswrapper[4899]: I1003 09:47:10.663313 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6pt75" 
event={"ID":"7b287c0a-bb82-4ec7-bf9d-782452107a6d","Type":"ContainerStarted","Data":"519dc3e27fe5ee2d7b9d2c583c7a26975711e536e58d7f82e28e6dcff644a327"} Oct 03 09:47:10 crc kubenswrapper[4899]: I1003 09:47:10.689564 4899 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6pt75" podStartSLOduration=4.050014206 podStartE2EDuration="6.689542743s" podCreationTimestamp="2025-10-03 09:47:04 +0000 UTC" firstStartedPulling="2025-10-03 09:47:06.619112729 +0000 UTC m=+4000.726597682" lastFinishedPulling="2025-10-03 09:47:09.258641266 +0000 UTC m=+4003.366126219" observedRunningTime="2025-10-03 09:47:10.681566021 +0000 UTC m=+4004.789050974" watchObservedRunningTime="2025-10-03 09:47:10.689542743 +0000 UTC m=+4004.797027686" Oct 03 09:47:14 crc kubenswrapper[4899]: I1003 09:47:14.925429 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6pt75" Oct 03 09:47:14 crc kubenswrapper[4899]: I1003 09:47:14.926141 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6pt75" Oct 03 09:47:14 crc kubenswrapper[4899]: I1003 09:47:14.972658 4899 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6pt75" Oct 03 09:47:15 crc kubenswrapper[4899]: I1003 09:47:15.754028 4899 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6pt75" Oct 03 09:47:15 crc kubenswrapper[4899]: I1003 09:47:15.799654 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6pt75"] Oct 03 09:47:16 crc kubenswrapper[4899]: I1003 09:47:16.720168 4899 generic.go:334] "Generic (PLEG): container finished" podID="6c9c1639-66c8-4f63-be49-87f099fa6a39" containerID="91c1387c77c74cff722f4f72111b70fe091939c6ca7250540b45e9c88c6ddfe4" exitCode=0 Oct 03 09:47:16 crc kubenswrapper[4899]: I1003 09:47:16.720243 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-b8z59/must-gather-ssb8q" event={"ID":"6c9c1639-66c8-4f63-be49-87f099fa6a39","Type":"ContainerDied","Data":"91c1387c77c74cff722f4f72111b70fe091939c6ca7250540b45e9c88c6ddfe4"} Oct 03 09:47:16 crc kubenswrapper[4899]: I1003 09:47:16.720809 4899 scope.go:117] "RemoveContainer" containerID="91c1387c77c74cff722f4f72111b70fe091939c6ca7250540b45e9c88c6ddfe4" Oct 03 09:47:17 crc kubenswrapper[4899]: I1003 09:47:17.386386 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-b8z59_must-gather-ssb8q_6c9c1639-66c8-4f63-be49-87f099fa6a39/gather/0.log" Oct 03 09:47:17 crc kubenswrapper[4899]: I1003 09:47:17.728998 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6pt75" podUID="7b287c0a-bb82-4ec7-bf9d-782452107a6d" containerName="registry-server" containerID="cri-o://519dc3e27fe5ee2d7b9d2c583c7a26975711e536e58d7f82e28e6dcff644a327" gracePeriod=2 Oct 03 09:47:19 crc kubenswrapper[4899]: I1003 09:47:18.719310 4899 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6pt75" Oct 03 09:47:19 crc kubenswrapper[4899]: I1003 09:47:18.754441 4899 generic.go:334] "Generic (PLEG): container finished" podID="7b287c0a-bb82-4ec7-bf9d-782452107a6d" containerID="519dc3e27fe5ee2d7b9d2c583c7a26975711e536e58d7f82e28e6dcff644a327" exitCode=0 Oct 03 09:47:19 crc kubenswrapper[4899]: I1003 09:47:18.755104 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6pt75" event={"ID":"7b287c0a-bb82-4ec7-bf9d-782452107a6d","Type":"ContainerDied","Data":"519dc3e27fe5ee2d7b9d2c583c7a26975711e536e58d7f82e28e6dcff644a327"} Oct 03 09:47:19 crc kubenswrapper[4899]: I1003 09:47:18.755185 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6pt75" event={"ID":"7b287c0a-bb82-4ec7-bf9d-782452107a6d","Type":"ContainerDied","Data":"2f8c784af972dd55ba568fb84adffd81ae130a5e3ee52d7b7bd6492a69e7d214"} Oct 03 09:47:19 crc kubenswrapper[4899]: I1003 09:47:18.755190 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6pt75" Oct 03 09:47:19 crc kubenswrapper[4899]: I1003 09:47:18.755207 4899 scope.go:117] "RemoveContainer" containerID="519dc3e27fe5ee2d7b9d2c583c7a26975711e536e58d7f82e28e6dcff644a327" Oct 03 09:47:19 crc kubenswrapper[4899]: I1003 09:47:18.784810 4899 scope.go:117] "RemoveContainer" containerID="ba0eca80081be97abd60d53503b734349a739078f48c4da756a52674b2e475f5" Oct 03 09:47:19 crc kubenswrapper[4899]: I1003 09:47:18.802942 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b287c0a-bb82-4ec7-bf9d-782452107a6d-catalog-content\") pod \"7b287c0a-bb82-4ec7-bf9d-782452107a6d\" (UID: \"7b287c0a-bb82-4ec7-bf9d-782452107a6d\") " Oct 03 09:47:19 crc kubenswrapper[4899]: I1003 09:47:18.803077 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-npgdr\" (UniqueName: \"kubernetes.io/projected/7b287c0a-bb82-4ec7-bf9d-782452107a6d-kube-api-access-npgdr\") pod \"7b287c0a-bb82-4ec7-bf9d-782452107a6d\" (UID: \"7b287c0a-bb82-4ec7-bf9d-782452107a6d\") " Oct 03 09:47:19 crc kubenswrapper[4899]: I1003 09:47:18.803138 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b287c0a-bb82-4ec7-bf9d-782452107a6d-utilities\") pod \"7b287c0a-bb82-4ec7-bf9d-782452107a6d\" (UID: \"7b287c0a-bb82-4ec7-bf9d-782452107a6d\") " Oct 03 09:47:19 crc kubenswrapper[4899]: I1003 09:47:18.804031 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b287c0a-bb82-4ec7-bf9d-782452107a6d-utilities" (OuterVolumeSpecName: "utilities") pod "7b287c0a-bb82-4ec7-bf9d-782452107a6d" (UID: "7b287c0a-bb82-4ec7-bf9d-782452107a6d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:47:19 crc kubenswrapper[4899]: I1003 09:47:18.807972 4899 scope.go:117] "RemoveContainer" containerID="818269879f0bbc8e26887eb4a538d2788d60d383de523eae3c1e4aa6aaffc165" Oct 03 09:47:19 crc kubenswrapper[4899]: I1003 09:47:18.810481 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b287c0a-bb82-4ec7-bf9d-782452107a6d-kube-api-access-npgdr" (OuterVolumeSpecName: "kube-api-access-npgdr") pod "7b287c0a-bb82-4ec7-bf9d-782452107a6d" (UID: "7b287c0a-bb82-4ec7-bf9d-782452107a6d"). InnerVolumeSpecName "kube-api-access-npgdr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:47:19 crc kubenswrapper[4899]: I1003 09:47:18.901702 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b287c0a-bb82-4ec7-bf9d-782452107a6d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7b287c0a-bb82-4ec7-bf9d-782452107a6d" (UID: "7b287c0a-bb82-4ec7-bf9d-782452107a6d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:47:19 crc kubenswrapper[4899]: I1003 09:47:18.904911 4899 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b287c0a-bb82-4ec7-bf9d-782452107a6d-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 09:47:19 crc kubenswrapper[4899]: I1003 09:47:18.904931 4899 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b287c0a-bb82-4ec7-bf9d-782452107a6d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 09:47:19 crc kubenswrapper[4899]: I1003 09:47:18.904943 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-npgdr\" (UniqueName: \"kubernetes.io/projected/7b287c0a-bb82-4ec7-bf9d-782452107a6d-kube-api-access-npgdr\") on node \"crc\" DevicePath \"\"" Oct 03 09:47:19 crc kubenswrapper[4899]: I1003 09:47:18.916387 4899 scope.go:117] "RemoveContainer" containerID="519dc3e27fe5ee2d7b9d2c583c7a26975711e536e58d7f82e28e6dcff644a327" Oct 03 09:47:19 crc kubenswrapper[4899]: E1003 09:47:18.919855 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"519dc3e27fe5ee2d7b9d2c583c7a26975711e536e58d7f82e28e6dcff644a327\": container with ID starting with 519dc3e27fe5ee2d7b9d2c583c7a26975711e536e58d7f82e28e6dcff644a327 not found: ID does not exist" containerID="519dc3e27fe5ee2d7b9d2c583c7a26975711e536e58d7f82e28e6dcff644a327" Oct 03 09:47:19 crc kubenswrapper[4899]: I1003 09:47:18.920149 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"519dc3e27fe5ee2d7b9d2c583c7a26975711e536e58d7f82e28e6dcff644a327"} err="failed to get container status \"519dc3e27fe5ee2d7b9d2c583c7a26975711e536e58d7f82e28e6dcff644a327\": rpc error: code = NotFound desc = could not find container \"519dc3e27fe5ee2d7b9d2c583c7a26975711e536e58d7f82e28e6dcff644a327\": container with ID starting with 519dc3e27fe5ee2d7b9d2c583c7a26975711e536e58d7f82e28e6dcff644a327 not found: ID does not exist" Oct 03 09:47:19 crc kubenswrapper[4899]: I1003 09:47:18.920182 4899 scope.go:117] "RemoveContainer" containerID="ba0eca80081be97abd60d53503b734349a739078f48c4da756a52674b2e475f5" Oct 03 09:47:19 crc kubenswrapper[4899]: E1003 09:47:18.920685 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find 
container \"ba0eca80081be97abd60d53503b734349a739078f48c4da756a52674b2e475f5\": container with ID starting with ba0eca80081be97abd60d53503b734349a739078f48c4da756a52674b2e475f5 not found: ID does not exist" containerID="ba0eca80081be97abd60d53503b734349a739078f48c4da756a52674b2e475f5" Oct 03 09:47:19 crc kubenswrapper[4899]: I1003 09:47:18.920729 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba0eca80081be97abd60d53503b734349a739078f48c4da756a52674b2e475f5"} err="failed to get container status \"ba0eca80081be97abd60d53503b734349a739078f48c4da756a52674b2e475f5\": rpc error: code = NotFound desc = could not find container \"ba0eca80081be97abd60d53503b734349a739078f48c4da756a52674b2e475f5\": container with ID starting with ba0eca80081be97abd60d53503b734349a739078f48c4da756a52674b2e475f5 not found: ID does not exist" Oct 03 09:47:19 crc kubenswrapper[4899]: I1003 09:47:18.920756 4899 scope.go:117] "RemoveContainer" containerID="818269879f0bbc8e26887eb4a538d2788d60d383de523eae3c1e4aa6aaffc165" Oct 03 09:47:19 crc kubenswrapper[4899]: E1003 09:47:18.921347 4899 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"818269879f0bbc8e26887eb4a538d2788d60d383de523eae3c1e4aa6aaffc165\": container with ID starting with 818269879f0bbc8e26887eb4a538d2788d60d383de523eae3c1e4aa6aaffc165 not found: ID does not exist" containerID="818269879f0bbc8e26887eb4a538d2788d60d383de523eae3c1e4aa6aaffc165" Oct 03 09:47:19 crc kubenswrapper[4899]: I1003 09:47:18.921405 4899 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"818269879f0bbc8e26887eb4a538d2788d60d383de523eae3c1e4aa6aaffc165"} err="failed to get container status \"818269879f0bbc8e26887eb4a538d2788d60d383de523eae3c1e4aa6aaffc165\": rpc error: code = NotFound desc = could not find container \"818269879f0bbc8e26887eb4a538d2788d60d383de523eae3c1e4aa6aaffc165\": container with ID starting with 818269879f0bbc8e26887eb4a538d2788d60d383de523eae3c1e4aa6aaffc165 not found: ID does not exist" Oct 03 09:47:19 crc kubenswrapper[4899]: I1003 09:47:19.097736 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6pt75"] Oct 03 09:47:19 crc kubenswrapper[4899]: I1003 09:47:19.107717 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6pt75"] Oct 03 09:47:19 crc kubenswrapper[4899]: E1003 09:47:19.187874 4899 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7b287c0a_bb82_4ec7_bf9d_782452107a6d.slice/crio-2f8c784af972dd55ba568fb84adffd81ae130a5e3ee52d7b7bd6492a69e7d214\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7b287c0a_bb82_4ec7_bf9d_782452107a6d.slice\": RecentStats: unable to find data in memory cache]" Oct 03 09:47:20 crc kubenswrapper[4899]: I1003 09:47:20.538366 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b287c0a-bb82-4ec7-bf9d-782452107a6d" path="/var/lib/kubelet/pods/7b287c0a-bb82-4ec7-bf9d-782452107a6d/volumes" Oct 03 09:47:28 crc kubenswrapper[4899]: I1003 09:47:28.921551 4899 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-b8z59/must-gather-ssb8q"] Oct 03 09:47:28 crc kubenswrapper[4899]: I1003 09:47:28.922442 4899 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openshift-must-gather-b8z59/must-gather-ssb8q" podUID="6c9c1639-66c8-4f63-be49-87f099fa6a39" containerName="copy" containerID="cri-o://58c89a6b7e3f0b0e2ddc98d210a60844d732e7d1046002dc29d2cd8be1ffb78e" gracePeriod=2 Oct 03 09:47:28 crc kubenswrapper[4899]: I1003 09:47:28.934765 4899 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-b8z59/must-gather-ssb8q"] Oct 03 09:47:29 crc kubenswrapper[4899]: I1003 09:47:29.876812 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-b8z59_must-gather-ssb8q_6c9c1639-66c8-4f63-be49-87f099fa6a39/copy/0.log" Oct 03 09:47:29 crc kubenswrapper[4899]: I1003 09:47:29.878419 4899 generic.go:334] "Generic (PLEG): container finished" podID="6c9c1639-66c8-4f63-be49-87f099fa6a39" containerID="58c89a6b7e3f0b0e2ddc98d210a60844d732e7d1046002dc29d2cd8be1ffb78e" exitCode=143 Oct 03 09:47:29 crc kubenswrapper[4899]: I1003 09:47:29.878472 4899 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="71aba91da2e0e1b5700f248aaeb51e4a7359388ad11baf37c5a3609262d43b29" Oct 03 09:47:29 crc kubenswrapper[4899]: I1003 09:47:29.891676 4899 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-b8z59_must-gather-ssb8q_6c9c1639-66c8-4f63-be49-87f099fa6a39/copy/0.log" Oct 03 09:47:29 crc kubenswrapper[4899]: I1003 09:47:29.892313 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-b8z59/must-gather-ssb8q" Oct 03 09:47:30 crc kubenswrapper[4899]: I1003 09:47:30.037741 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6c9c1639-66c8-4f63-be49-87f099fa6a39-must-gather-output\") pod \"6c9c1639-66c8-4f63-be49-87f099fa6a39\" (UID: \"6c9c1639-66c8-4f63-be49-87f099fa6a39\") " Oct 03 09:47:30 crc kubenswrapper[4899]: I1003 09:47:30.038832 4899 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lkqwh\" (UniqueName: \"kubernetes.io/projected/6c9c1639-66c8-4f63-be49-87f099fa6a39-kube-api-access-lkqwh\") pod \"6c9c1639-66c8-4f63-be49-87f099fa6a39\" (UID: \"6c9c1639-66c8-4f63-be49-87f099fa6a39\") " Oct 03 09:47:30 crc kubenswrapper[4899]: I1003 09:47:30.049954 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c9c1639-66c8-4f63-be49-87f099fa6a39-kube-api-access-lkqwh" (OuterVolumeSpecName: "kube-api-access-lkqwh") pod "6c9c1639-66c8-4f63-be49-87f099fa6a39" (UID: "6c9c1639-66c8-4f63-be49-87f099fa6a39"). InnerVolumeSpecName "kube-api-access-lkqwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 09:47:30 crc kubenswrapper[4899]: I1003 09:47:30.140879 4899 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lkqwh\" (UniqueName: \"kubernetes.io/projected/6c9c1639-66c8-4f63-be49-87f099fa6a39-kube-api-access-lkqwh\") on node \"crc\" DevicePath \"\"" Oct 03 09:47:30 crc kubenswrapper[4899]: I1003 09:47:30.190642 4899 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c9c1639-66c8-4f63-be49-87f099fa6a39-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "6c9c1639-66c8-4f63-be49-87f099fa6a39" (UID: "6c9c1639-66c8-4f63-be49-87f099fa6a39"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 09:47:30 crc kubenswrapper[4899]: I1003 09:47:30.243444 4899 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6c9c1639-66c8-4f63-be49-87f099fa6a39-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 03 09:47:30 crc kubenswrapper[4899]: I1003 09:47:30.536299 4899 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c9c1639-66c8-4f63-be49-87f099fa6a39" path="/var/lib/kubelet/pods/6c9c1639-66c8-4f63-be49-87f099fa6a39/volumes" Oct 03 09:47:30 crc kubenswrapper[4899]: I1003 09:47:30.890552 4899 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-b8z59/must-gather-ssb8q" Oct 03 09:47:42 crc kubenswrapper[4899]: I1003 09:47:42.197868 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 09:47:42 crc kubenswrapper[4899]: I1003 09:47:42.198386 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 09:48:12 crc kubenswrapper[4899]: I1003 09:48:12.198678 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 09:48:12 crc kubenswrapper[4899]: I1003 09:48:12.199268 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 09:48:42 crc kubenswrapper[4899]: I1003 09:48:42.198457 4899 patch_prober.go:28] interesting pod/machine-config-daemon-t2h4g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 09:48:42 crc kubenswrapper[4899]: I1003 09:48:42.199669 4899 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 09:48:42 crc kubenswrapper[4899]: I1003 09:48:42.199766 4899 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" Oct 03 09:48:42 crc kubenswrapper[4899]: I1003 09:48:42.201252 4899 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"42ec69e38d0aefeb209614dbfbe306a4334353cab436262321077968632fe1de"} 
pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 09:48:42 crc kubenswrapper[4899]: I1003 09:48:42.201329 4899 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" podUID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerName="machine-config-daemon" containerID="cri-o://42ec69e38d0aefeb209614dbfbe306a4334353cab436262321077968632fe1de" gracePeriod=600 Oct 03 09:48:42 crc kubenswrapper[4899]: I1003 09:48:42.571498 4899 generic.go:334] "Generic (PLEG): container finished" podID="3e8a7198-81da-475c-ac88-a460ba4064d1" containerID="42ec69e38d0aefeb209614dbfbe306a4334353cab436262321077968632fe1de" exitCode=0 Oct 03 09:48:42 crc kubenswrapper[4899]: I1003 09:48:42.571656 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerDied","Data":"42ec69e38d0aefeb209614dbfbe306a4334353cab436262321077968632fe1de"} Oct 03 09:48:42 crc kubenswrapper[4899]: I1003 09:48:42.571883 4899 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-t2h4g" event={"ID":"3e8a7198-81da-475c-ac88-a460ba4064d1","Type":"ContainerStarted","Data":"5c5afa9a3ae3017e4a43a18b6d1ec23ecb61e43f22132cdc8248fd53bbfb6fdd"} Oct 03 09:48:42 crc kubenswrapper[4899]: I1003 09:48:42.571927 4899 scope.go:117] "RemoveContainer" containerID="b8ebbfd8894683da23b68bc146fe862d7b291b11459a49169db4963ed85422ba" Oct 03 09:48:51 crc kubenswrapper[4899]: I1003 09:48:51.064085 4899 scope.go:117] "RemoveContainer" containerID="91c1387c77c74cff722f4f72111b70fe091939c6ca7250540b45e9c88c6ddfe4" Oct 03 09:48:51 crc kubenswrapper[4899]: I1003 09:48:51.121079 4899 scope.go:117] "RemoveContainer" containerID="58c89a6b7e3f0b0e2ddc98d210a60844d732e7d1046002dc29d2cd8be1ffb78e" Oct 03 09:48:51 crc kubenswrapper[4899]: I1003 09:48:51.141650 4899 scope.go:117] "RemoveContainer" containerID="29ca2859256c29e15c87f58d41594e0030e2a4a07c8f64f419b405bd020e8405" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515067716011024451 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015067716012017367 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015067705466016525 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015067705466015475 5ustar corecore